changeset 26813:b66e3ca0b90c stable 3.6-rc

merge default into stable for code freeze
author Matt Mackall <mpm@selenic.com>
date Tue, 20 Oct 2015 15:59:10 -0500
parents d3712209921d (current diff) 58a309e9cf80 (diff)
children d5eb1f60fcc2
files doc/check-seclevel.py mercurial/templates/static/style-coal.css
diffstat 433 files changed, 13764 insertions(+), 5964 deletions(-)
--- a/Makefile	Thu Oct 08 23:24:38 2015 +0900
+++ b/Makefile	Tue Oct 20 15:59:10 2015 -0500
@@ -51,6 +51,9 @@
 build:
 	$(PYTHON) setup.py $(PURE) build $(COMPILER:%=-c %)
 
+wheel:
+	FORCE_SETUPTOOLS=1 $(PYTHON) setup.py $(PURE) bdist_wheel $(COMPILER:%=-c %)
+
 doc:
 	$(MAKE) -C doc
 
@@ -157,14 +160,12 @@
 	N=`cd dist && echo mercurial-*.mpkg | sed 's,\.mpkg$$,,'` && hdiutil create -srcfolder dist/$$N.mpkg/ -scrub -volname "$$N" -ov packages/osx/$$N.dmg
 	rm -rf dist/mercurial-*.mpkg
 
-debian-jessie:
-	mkdir -p packages/debian-jessie
-	contrib/builddeb
-	mv debbuild/*.deb packages/debian-jessie
-	rm -rf debbuild
+deb:
+	mkdir -p packages/debian-unknown
+	contrib/builddeb --release unknown
 
 docker-debian-jessie:
-	mkdir -p packages/debian/jessie
+	mkdir -p packages/debian-jessie
 	contrib/dockerdeb jessie
 
 fedora20:
--- a/README	Thu Oct 08 23:24:38 2015 +0900
+++ b/README	Tue Oct 20 15:59:10 2015 -0500
@@ -16,5 +16,5 @@
  $ make local      # build for inplace usage
  $ ./hg --version  # should show the latest version
 
-See http://mercurial.selenic.com/ for detailed installation
+See https://mercurial-scm.org/ for detailed installation
 instructions, platform-specific notes, and Mercurial user information.
--- a/contrib/Makefile.python	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/Makefile.python	Tue Oct 20 15:59:10 2015 -0500
@@ -1,4 +1,4 @@
-PYTHONVER=2.7.9
+PYTHONVER=2.7.10
 PYTHONNAME=python-
 PREFIX=$(HOME)/bin/prefix-$(PYTHONNAME)$(PYTHONVER)
 SYMLINKDIR=$(HOME)/bin
@@ -17,9 +17,9 @@
 	@echo '  symlink   - create a $$SYMLINKDIR/$(PYTHONNAME)$$PYTHONVER symlink'
 	@echo
 	@echo 'Example: create a temporary Python installation:'
-	@echo '  $$ make -f Makefile.python python PYTHONVER=2.4 PREFIX=/tmp/p24'
-	@echo '  $$ /tmp/p24/bin/python -V'
-	@echo '  Python 2.4'
+	@echo '  $$ make -f Makefile.python python PYTHONVER=${PYTHONVER} PREFIX=/tmp/p27'
+	@echo '  $$ /tmp/p27/bin/python -V'
+	@echo '  Python 2.7'
 	@echo
 	@echo 'Some external libraries are required for building Python: zlib bzip2 openssl.'
 	@echo 'Make sure their development packages are installed systemwide.'
@@ -27,7 +27,7 @@
 # debian: apt-get install zlib1g-dev libbz2-dev libssl-dev
 	@echo
 	@echo 'To build a nice collection of interesting Python versions:'
-	@echo '  $$ for v in 2.{4{,.2,.3},5{,.6},6{,.1,.2,.9},7{,.8,.9}}; do'
+	@echo '  $$ for v in 2.{6{,.1,.2,.9},7{,.8,.10}}; do'
 	@echo '    make -f Makefile.python symlink PYTHONVER=$$v || break; done'
 	@echo 'To run a Mercurial test on all these Python versions:'
 	@echo '  $$ for py in `cd ~/bin && ls $(PYTHONNAME)2.*`; do'
@@ -38,7 +38,7 @@
 export LC_ALL=C
 
 python: $(PREFIX)/bin/python docutils
-	printf 'import sys, zlib, bz2, docutils\nif sys.version_info >= (2,6):\n import ssl' | $(PREFIX)/bin/python
+	printf 'import sys, zlib, bz2, docutils, ssl' | $(PREFIX)/bin/python
 
 PYTHON_SRCDIR=Python-$(PYTHONVER)
 PYTHON_SRCFILE=$(PYTHON_SRCDIR).tgz
@@ -49,15 +49,12 @@
 	tar xf $(PYTHON_SRCFILE)
 	# Ubuntu disables SSLv2 the hard way, disable it on old Pythons too
 	-sed -i 's,self.*SSLv2_method(),0;//\0,g' $(PYTHON_SRCDIR)/Modules/_ssl.c
-	# Find multiarch system libraries on Ubuntu with Python 2.4.x
-	# http://lipyrary.blogspot.dk/2011/05/how-to-compile-python-on-ubuntu-1104.html
-	-sed -i "s|lib_dirs = .* \[|\0'/usr/lib/`dpkg-architecture -qDEB_HOST_MULTIARCH`',|g" $(PYTHON_SRCDIR)/setup.py
 	# Find multiarch system libraries on Ubuntu and disable fortify error when setting argv
 	LDFLAGS="-L/usr/lib/`dpkg-architecture -qDEB_HOST_MULTIARCH`"; \
 	BASECFLAGS=-U_FORTIFY_SOURCE; \
 	export LDFLAGS BASECFLAGS; \
 	cd $(PYTHON_SRCDIR) && ./configure --prefix=$(PREFIX) && make all SVNVERSION=pwd && make install
-	printf 'import sys, zlib, bz2\nif sys.version_info >= (2,6):\n import ssl' | $(PREFIX)/bin/python
+	printf 'import sys, zlib, bz2, ssl' | $(PREFIX)/bin/python
 	rm -rf $(PYTHON_SRCDIR)
 
 DOCUTILSVER=0.12
--- a/contrib/builddeb	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/builddeb	Tue Oct 20 15:59:10 2015 -0500
@@ -7,13 +7,23 @@
 . $(dirname $0)/packagelib.sh
 
 BUILD=1
-DEBBUILDDIR="$PWD/debbuild"
+CLEANUP=1
+DEBVERSION=jessie
 while [ "$1" ]; do
     case "$1" in
-    --prepare )
+    --release )
+        shift
+        DEBVERSION="$1"
+        shift
+        ;;
+    --cleanup )
         shift
         BUILD=
         ;;
+    --build )
+        shift
+        CLEANUP=
+        ;;
     --debbuilddir )
         shift
         DEBBUILDDIR="$1"
@@ -26,10 +36,9 @@
     esac
 done
 
-set -u
+trap "if [ '$CLEANUP' ] ; then rm -r '$PWD/debian' ; fi" EXIT
 
-rm -rf $DEBBUILDDIR
-mkdir -p $DEBBUILDDIR
+set -u
 
 if [ ! -d .hg ]; then
     echo 'You are not inside a Mercurial repository!' 1>&2
@@ -38,25 +47,38 @@
 
 gethgversion
 
-cp -r $PWD/contrib/debian $DEBBUILDDIR/DEBIAN
-chmod -R 0755 $DEBBUILDDIR/DEBIAN
-
-control=$DEBBUILDDIR/DEBIAN/control
-
-# This looks like sed -i, but sed -i behaves just differently enough
-# between BSD and GNU sed that I gave up and did the dumb thing.
-sed "s/__VERSION__/$version/" < $control > $control.tmp
-mv $control.tmp $control
+control=debian/control
+changelog=debian/changelog
 
 if [ "$BUILD" ]; then
-    dpkg-deb --build $DEBBUILDDIR
-    mv $DEBBUILDDIR.deb $DEBBUILDDIR/mercurial-$version-$release.deb
-    if [ $? = 0 ]; then
-        echo
-        echo "Built packages for $version-$release:"
-        find $DEBBUILDDIR/ -type f -newer $control
+    if [ -d debian ] ; then
+        echo "Error! debian control directory already exists!"
+        exit 1
     fi
-else
-    echo "Prepared sources for $version-$release $control are in $DEBBUILDDIR - use like:"
-    echo "dpkg-deb --build $DEBBUILDDIR"
+
+    cp -r $PWD/contrib/debian debian
+    chmod -R 0755 debian
+
+    # This looks like sed -i, but sed -i behaves just differently enough
+    # between BSD and GNU sed that I gave up and did the dumb thing.
+    sed "s/__VERSION__/$version/" < $changelog > $changelog.tmp
+    date=$(date --rfc-2822)
+    sed "s/__DATE__/$date/" < $changelog.tmp > $changelog
+    rm $changelog.tmp
+
+    debuild -us -uc -b
+    if [ $? != 0 ]; then
+        echo 'debuild failed!'
+        exit 1
+    fi
+
 fi
+if [ "$CLEANUP" ] ; then
+    echo
+    OUTPUTDIR=${OUTPUTDIR:=packages/debian-$DEBVERSION}
+    find ../mercurial*.deb ../mercurial_*.build ../mercurial_*.changes \
+          -type f -newer $control -print0 | \
+      xargs -Inarf -0 mv narf "$OUTPUTDIR"
+    echo "Built packages for $version-$release:"
+    find "$OUTPUTDIR" -type f -newer $control -name '*.deb'
+fi
--- a/contrib/buildrpm	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/buildrpm	Tue Oct 20 15:59:10 2015 -0500
@@ -19,8 +19,8 @@
         ;;
     --withpython | --with-python)
         shift
-        PYTHONVER=2.7.9
-        PYTHONMD5=5eebcaa0030dc4061156d3429657fb83
+        PYTHONVER=2.7.10
+        PYTHONMD5=d7547558fd673bd9d38e2108c6b42521
         ;;
     --rpmbuilddir )
         shift
@@ -56,6 +56,7 @@
     RPMPYTHONVER=%{nil}
 fi
 
+mkdir -p $RPMBUILDDIR/SOURCES
 $HG archive -t tgz $RPMBUILDDIR/SOURCES/mercurial-$version-$release.tar.gz
 if [ "$PYTHONVER" ]; then
 (
@@ -79,6 +80,7 @@
 )
 fi
 
+mkdir -p $RPMBUILDDIR/SPECS
 rpmspec=$RPMBUILDDIR/SPECS/mercurial.spec
 
 sed -e "s,^Version:.*,Version: $version," \
--- a/contrib/check-code.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/check-code.py	Tue Oct 20 15:59:10 2015 -0500
@@ -123,7 +123,9 @@
     (r'sed (-e )?\'(\d+|/[^/]*/)i(?!\\\n)',
      "put a backslash-escaped newline after sed 'i' command"),
     (r'^diff *-\w*u.*$\n(^  \$ |^$)', "prefix diff -u with cmp"),
-    (r'seq ', "don't use 'seq', use $TESTDIR/seq.py")
+    (r'seq ', "don't use 'seq', use $TESTDIR/seq.py"),
+    (r'\butil\.Abort\b', "directly use error.Abort"),
+    (r'\|&', "don't use |&, use 2>&1"),
   ],
   # warnings
   [
@@ -197,7 +199,6 @@
 
 pypats = [
   [
-    (r'\([^)]*\*\w[^()]+\w+=', "can't pass varargs with keyword in Py2.5"),
     (r'^\s*def\s*\w+\s*\(.*,\s*\(',
      "tuple parameter unpacking not available in Python 3+"),
     (r'lambda\s*\(.*,.*\)',
@@ -291,6 +292,9 @@
     (r'os\.path\.join\(.*, *(""|\'\')\)',
      "use pathutil.normasprefix(path) instead of os.path.join(path, '')"),
     (r'\s0[0-7]+\b', 'legacy octal syntax; use "0o" prefix instead of "0"'),
+    # XXX only catch mutable arguments on the first line of the definition
+    (r'def.*[( ]\w+=\{\}', "don't use mutable default arguments"),
+    (r'\butil\.Abort\b', "directly use error.Abort"),
   ],
   # warnings
   [
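
The new pypats rule above flags mutable default arguments because a default
dict or list is built once, when the function is defined, and is then shared
by every call. A standalone sketch of the pitfall and the usual fix (the
function names are placeholders, not code from this changeset)::

    def remember_bad(item, seen={}):     # flagged by the new check
        # the same dict object persists across calls
        seen[item] = True
        return seen

    def remember_good(item, seen=None):  # idiomatic fix
        if seen is None:
            seen = {}                    # fresh dict on every call
        seen[item] = True
        return seen

    remember_bad('a')
    print(remember_bad('b'))   # contains both 'a' and 'b': state leaked
    remember_good('a')
    print(remember_good('b'))  # contains only 'b'
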
--- a/contrib/check-commit	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/check-commit	Tue Oct 20 15:59:10 2015 -0500
@@ -13,7 +13,7 @@
 #
 # $ BYPASS= hg commit
 #
-# See also: http://mercurial.selenic.com/wiki/ContributingChanges
+# See also: https://mercurial-scm.org/wiki/ContributingChanges
 
 import re, sys, os
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/debian/cacerts.rc	Tue Oct 20 15:59:10 2015 -0500
@@ -0,0 +1,5 @@
+# This config file points Mercurial at the system-wide certificate
+# store from the ca-certificates package.
+
+[web]
+cacerts = /etc/ssl/certs/ca-certificates.crt
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/debian/changelog	Tue Oct 20 15:59:10 2015 -0500
@@ -0,0 +1,5 @@
+mercurial (__VERSION__) unstable; urgency=medium
+
+  * Automated build performed by upstream.
+
+ -- Mercurial Devel <mercurial-devel@selenic.com>  __DATE__
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/debian/compat	Tue Oct 20 15:59:10 2015 -0500
@@ -0,0 +1,1 @@
+9
--- a/contrib/debian/control	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/debian/control	Tue Oct 20 15:59:10 2015 -0500
@@ -1,9 +1,47 @@
-Package: mercurial
-Version: __VERSION__
+Source: mercurial
 Section: vcs
 Priority: optional
+Maintainer: Mercurial Developers <mercurial-devel@selenic.com>
+Build-Depends:
+ debhelper (>= 7),
+ dh-python,
+ python-all
+Standards-Version: 3.9.4
+X-Python-Version: >= 2.6
+
+Package: mercurial
+Depends:
+ python,
+ ${shlibs:Depends},
+ ${misc:Depends},
+ ${python:Depends},
+ mercurial-common (= ${source:Version})
+Architecture: any
+Description: fast, easy to use, distributed revision control tool.
+ Mercurial is a fast, lightweight Source Control Management system designed
+ for efficient handling of very large distributed projects.
+ .
+ Its features include:
+  * O(1) delta-compressed file storage and retrieval scheme
+  * Complete cross-indexing of files and changesets for efficient exploration
+    of project history
+  * Robust SHA1-based integrity checking and append-only storage model
+  * Decentralized development model with arbitrary merging between trees
+  * Easy-to-use command-line interface
+  * Integrated stand-alone web interface
+  * Small Python codebase
+
+Package: mercurial-common
 Architecture: all
-Depends: python
-Conflicts: mercurial-common
-Maintainer: Mercurial Developers <mercurial-devel@selenic.com>
-Description: Mercurial (probably nightly) package built by upstream.
+Depends:
+ ${misc:Depends},
+ ${python:Depends},
+Recommends: mercurial (= ${source:Version}), ca-certificates
+Breaks: mercurial (<< ${source:Version})
+Replaces: mercurial (<< 2.6.3)
+Description: easy-to-use, scalable distributed version control system (common files)
+ Mercurial is a fast, lightweight Source Control Management system designed
+ for efficient handling of very large distributed projects.
+ .
+ This package contains the architecture independent components of Mercurial,
+ and is generally useless without the mercurial package.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/debian/copyright	Tue Oct 20 15:59:10 2015 -0500
@@ -0,0 +1,27 @@
+Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
+Upstream-Name: mercurial
+Source: http://www.selenic.com/mercurial/
+
+Files: *
+Copyright: 2005-2015, Matt Mackall <mpm@selenic.com> and others.
+License: GPL-2+
+ This program is free software; you can redistribute it
+ and/or modify it under the terms of the GNU General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later
+ version.
+ .
+ This program is distributed in the hope that it will be
+ useful, but WITHOUT ANY WARRANTY; without even the implied
+ warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ PURPOSE.  See the GNU General Public License for more
+ details.
+ .
+ You should have received a copy of the GNU General Public
+ License along with this package; if not, write to the Free
+ Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ Boston, MA  02110-1301 USA
+ .
+ On Debian systems, the full text of the GNU General Public
+ License version 2 can be found in the file
+ `/usr/share/common-licenses/GPL-2'.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/debian/hgkpath.rc	Tue Oct 20 15:59:10 2015 -0500
@@ -0,0 +1,2 @@
+[hgk]
+path = /usr/share/mercurial/hgk
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/debian/rules	Tue Oct 20 15:59:10 2015 -0500
@@ -0,0 +1,36 @@
+#!/usr/bin/make -f
+# Uncomment this to turn on verbose mode.
+# export DH_VERBOSE=1
+
+CPUS=$(shell cat /proc/cpuinfo | grep -E ^processor | wc -l)
+
+%:
+	dh $@ --with python2
+
+override_dh_auto_test:
+	http_proxy='' dh_auto_test -- TESTFLAGS="-j$(CPUS)"
+
+override_dh_python2:
+	dh_python2
+	find debian/mercurial/usr/share -type d -empty -delete
+
+override_dh_install:
+	python$(PYVERS) setup.py install --root $(CURDIR)/debian/mercurial --install-layout=deb
+	# remove arch-independent python stuff
+	find $(CURDIR)/debian/mercurial/usr/lib \
+		! -name '*.so' ! -type d -delete , \
+		-type d -empty -delete
+	python$(PYVERS) setup.py install --root $(CURDIR)/debian/mercurial-common --install-layout=deb
+	make install-doc PREFIX=$(CURDIR)/debian/mercurial-common/usr
+	# remove arch-dependent python stuff
+	find $(CURDIR)/debian/mercurial-common/usr/lib \
+		-name '*.so' ! -type d -delete , \
+		-type d -empty -delete
+	cp contrib/hg-ssh $(CURDIR)/debian/mercurial-common/usr/bin
+	mkdir -p $(CURDIR)/debian/mercurial-common/usr/share/mercurial
+	cp contrib/hgk $(CURDIR)/debian/mercurial-common/usr/share/mercurial
+	mkdir -p $(CURDIR)/debian/mercurial-common/etc/mercurial/hgrc.d/
+	cp contrib/debian/*.rc $(CURDIR)/debian/mercurial-common/etc/mercurial/hgrc.d/
+	mkdir -p $(CURDIR)/debian/mercurial-common/usr/share/bash-completion/completions
+	cp contrib/bash_completion $(CURDIR)/debian/mercurial-common/usr/share/bash-completion/completions/hg
+	rm $(CURDIR)/debian/mercurial-common/usr/bin/hg
--- a/contrib/dockerdeb	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/dockerdeb	Tue Oct 20 15:59:10 2015 -0500
@@ -8,32 +8,27 @@
 
 checkdocker
 
+DEBPLATFORM="$1"
 PLATFORM="debian-$1"
 shift # extra params are passed to build process
 
+OUTPUTDIR=${OUTPUTDIR:=$ROOTDIR/packages/$PLATFORM}
+
 initcontainer $PLATFORM
 
-DEBBUILDDIR=$ROOTDIR/packages/$PLATFORM
-contrib/builddeb --debbuilddir $DEBBUILDDIR/staged --prepare
+# debuild only appears to be able to save built debs etc to .., so we
+# have to share the .. of the current directory with the docker
+# container and hope it's writable. Whee.
+dn=$(basename $PWD)
 
-DSHARED=/mnt/shared/
 if [ $(uname) = "Darwin" ] ; then
-    $DOCKER run -u $DBUILDUSER --rm -v $DEBBUILDDIR:$DSHARED -v $PWD:/mnt/hg $CONTAINER \
-            sh -c "cd /mnt/hg && make clean && make local"
+    $DOCKER run -u $DBUILDUSER --rm -v $PWD/..:/mnt $CONTAINER \
+            sh -c "cd /mnt/$dn && make clean && make local"
 fi
-$DOCKER run -u $DBUILDUSER --rm -v $DEBBUILDDIR:$DSHARED -v $PWD:/mnt/hg $CONTAINER \
-  sh -c "cd /mnt/hg && make PREFIX=$DSHARED/staged/usr install"
-$DOCKER run -u $DBUILDUSER --rm -v $DEBBUILDDIR:$DSHARED $CONTAINER \
-  dpkg-deb --build $DSHARED/staged
+$DOCKER run -u $DBUILDUSER --rm -v $PWD/..:/mnt $CONTAINER \
+  sh -c "cd /mnt/$dn && DEB_BUILD_OPTIONS='${DEB_BUILD_OPTIONS:=}' contrib/builddeb --build --release $DEBPLATFORM"
+contrib/builddeb --cleanup --release $DEBPLATFORM
 if [ $(uname) = "Darwin" ] ; then
-    $DOCKER run -u $DBUILDUSER --rm -v $DEBBUILDDIR:$DSHARED -v $PWD:/mnt/hg $CONTAINER \
-            sh -c "cd /mnt/hg && make clean"
+    $DOCKER run -u $DBUILDUSER --rm -v $PWD/..:/mnt $CONTAINER \
+            sh -c "cd /mnt/$dn && make clean"
 fi
-
-gethgversion
-
-rm -r $DEBBUILDDIR/staged
-mv $DEBBUILDDIR/staged.deb $DEBBUILDDIR/mercurial-$version-$release.deb
-
-echo
-echo "Build complete - results can be found in $DEBBUILDDIR"
--- a/contrib/editmerge	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/editmerge	Tue Oct 20 15:59:10 2015 -0500
@@ -10,23 +10,23 @@
 # editmerge.check=changed
 # editmerge.premerge=keep
 
-FILE=$1
+FILE="$1"
 
 getlines() {
-  grep -n "<<<<<<" $FILE | cut -f1 -d:
+  grep -n "^<<<<<<" "$FILE" | cut -f1 -d:
 }
 
-# editor preference loosely based on http://mercurial.selenic.com/wiki/editor
+# editor preference loosely based on https://mercurial-scm.org/wiki/editor
 # hg showconfig is at the bottom though, since it's slow to run (0.15 seconds)
-ED=$HGEDITOR
+ED="$HGEDITOR"
 if [ "$ED" = "" ] ; then
-  ED=$VISUAL
+  ED="$VISUAL"
 fi
 if [ "$ED" = "" ] ; then
-  ED=$EDITOR
+  ED="$EDITOR"
 fi
 if [ "$ED" = "" ] ; then
-  ED=$(hg showconfig ui.editor)
+  ED="$(hg showconfig ui.editor)"
 fi
 if [ "$ED" = "" ] ; then
   echo "merge failed - unable to find editor"
@@ -34,22 +34,22 @@
 fi
 
 if [ "$ED" = "emacs" ] || [ "$ED" = "nano" ] || [ "$ED" = "vim" ] ; then
-  FIRSTLINE=$(getlines | head -n 1)
+  FIRSTLINE="$(getlines | head -n 1)"
   PREVIOUSLINE=""
 
   # open the editor to the first conflict until there are no more
   # or the user stops editing the file
   while [ ! "$FIRSTLINE" = "" ] && [ ! "$FIRSTLINE" = "$PREVIOUSLINE" ] ; do
-    $ED +$FIRSTLINE $FILE
-    PREVIOUSLINE=$FIRSTLINE
-    FIRSTLINE=$(getlines | head -n 1)
+    $ED "+$FIRSTLINE" "$FILE"
+    PREVIOUSLINE="$FIRSTLINE"
+    FIRSTLINE="$(getlines | head -n 1)"
   done
 else
-  $ED $FILE
+  $ED "$FILE"
 fi
 
 # get the line numbers of the remaining conflicts
-CONFLICTS=$(getlines | sed ':a;N;$!ba;s/\n/, /g')
+CONFLICTS="$(getlines | sed ':a;N;$!ba;s/\n/, /g')"
 if [ ! "$CONFLICTS" = "" ] ; then
   echo "merge failed - resolve the conflicts (line $CONFLICTS) then use 'hg resolve --mark'"
   exit 1
--- a/contrib/hgweb.wsgi	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/hgweb.wsgi	Tue Oct 20 15:59:10 2015 -0500
@@ -1,5 +1,5 @@
 # An example WSGI for use with mod_wsgi, edit as necessary
-# See http://mercurial.selenic.com/wiki/modwsgi for more information
+# See https://mercurial-scm.org/wiki/modwsgi for more information
 
 # Path to repo or hgweb config to serve (see 'hg help hgweb')
 config = "/path/to/repo/or/config"
--- a/contrib/import-checker.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/import-checker.py	Tue Oct 20 15:59:10 2015 -0500
@@ -65,7 +65,7 @@
     Mercurial specific) modules.
 
     This function assumes that module names not existing in
-    `localmods` are ones of Python standard libarary.
+    `localmods` are from the Python standard library.
 
     This function returns the function, which takes `name` argument,
     and returns `(absname, dottedpath, hassubmod)` tuple if `name`
@@ -164,7 +164,7 @@
     for m in ['msvcrt', '_winreg']:
         yield m
     # These get missed too
-    for m in 'ctypes', 'email':
+    for m in 'ctypes', 'email', 'multiprocessing':
         yield m
     yield 'builtins' # python3 only
     for m in 'fcntl', 'grp', 'pwd', 'termios':  # Unix only
@@ -200,8 +200,7 @@
             for name in files:
                 if name == '__init__.py':
                     continue
-                if not (name.endswith('.py') or name.endswith('.so')
-                        or name.endswith('.pyd')):
+                if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')):
                     continue
                 full_path = os.path.join(top, name)
                 rel_path = full_path[len(libpath) + 1:]
--- a/contrib/macosx/Readme.html	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/macosx/Readme.html	Tue Oct 20 15:59:10 2015 -0500
@@ -25,7 +25,7 @@
 <p class="p2"><br></p>
 <p class="p1"><b>Documentation</b></p>
 <p class="p2"><br></p>
-<p class="p3">Visit the <a href="http://mercurial.selenic.com/">Mercurial web site and wiki</a></p>
+<p class="p3">Visit the <a href="https://mercurial-scm.org/">Mercurial web site and wiki</a></p>
 <p class="p2"><br></p>
 <p class="p3">There's also a free book, <a href="http://hgbook.red-bean.com/">Distributed revision control with Mercurial</a></p>
 <p class="p2"><br></p>
--- a/contrib/macosx/Welcome.html	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/macosx/Welcome.html	Tue Oct 20 15:59:10 2015 -0500
@@ -11,10 +11,10 @@
   </style>
 </head>
 <body>
-<p class="p1">This is a prepackaged release of <a href="http://mercurial.selenic.com/">Mercurial</a> for Mac OS X.</p>
+<p class="p1">This is a prepackaged release of <a href="https://mercurial-scm.org/">Mercurial</a> for Mac OS X.</p>
 <p class="p2"><br></p>
 <br>
 <p>
-Please be sure to read the latest <a href="http://mercurial.selenic.com/wiki/WhatsNew">release notes</a>.</p>
+Please be sure to read the latest <a href="https://mercurial-scm.org/wiki/WhatsNew">release notes</a>.</p>
 </body>
 </html>
--- a/contrib/mercurial.spec	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/mercurial.spec	Tue Oct 20 15:59:10 2015 -0500
@@ -25,7 +25,7 @@
 Release: 0
 License: GPLv2+
 Group: Development/Tools
-URL: http://mercurial.selenic.com/
+URL: https://mercurial-scm.org/
 Source0: %{name}-%{version}-%{release}.tar.gz
 %if "%{?withpython}"
 Source1: %{pythonname}.tgz
--- a/contrib/perf.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/perf.py	Tue Oct 20 15:59:10 2015 -0500
@@ -222,7 +222,7 @@
     "a" in ds
     def d():
         ds._dirty = True
-        ds.write()
+        ds.write(repo.currenttransaction())
     timer(d)
     fm.end()
 
--- a/contrib/plan9/README	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/plan9/README	Tue Oct 20 15:59:10 2015 -0500
@@ -35,5 +35,5 @@
 A proto(2) file is included in this directory as an example of how a
 binary distribution could be packaged, ostensibly with contrib(1).
 
-See http://mercurial.selenic.com/ for detailed installation
+See https://mercurial-scm.org/ for detailed installation
 instructions, platform-specific notes, and Mercurial user information.
--- a/contrib/revsetbenchmarks.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/revsetbenchmarks.py	Tue Oct 20 15:59:10 2015 -0500
@@ -33,6 +33,8 @@
     """update the repo to a revision"""
     try:
         check_call(['hg', 'update', '--quiet', '--check', str(rev)])
+        check_output(['make', 'local'],
+                     stderr=None)  # suppress output except for error/warning
     except CalledProcessError as exc:
         print >> sys.stderr, 'update to revision %s failed, aborting' % rev
         sys.exit(exc.returncode)
@@ -59,7 +61,7 @@
     except CalledProcessError as exc:
         print >> sys.stderr, 'abort: cannot run revset benchmark: %s' % exc.cmd
         if exc.output is None:
-            print >> sys.stderr, '(no ouput)'
+            print >> sys.stderr, '(no output)'
         else:
             print >> sys.stderr, exc.output
         return None
@@ -112,7 +114,7 @@
 def getfactor(main, other, field, sensitivity=0.05):
     """return the relative factor between values for 'field' in main and other
 
-    Return None if the factor is insignicant (less than <sensitivity>
+    Return None if the factor is insignificant (less than <sensitivity>
     variation)."""
     factor = 1
     if main is not None:
@@ -216,7 +218,7 @@
 
 helptext="""This script will run multiple variants of provided revsets using
 different revisions in your mercurial repository. After the benchmark are run
-summary output is provided. Use itto demonstrate speed improvements or pin
+summary output is provided. Use it to demonstrate speed improvements or pin
 point regressions. Revsets to run are specified in a file (or from stdin), one
 revsets per line. Line starting with '#' will be ignored, allowing insertion of
 comments."""
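
The check_output(['make', 'local'], ...) call added above rebuilds the working
copy's C extensions after each hg update before any timing is done. It also
leans on the subprocess behavior the existing error handling relies on:
check_output captures stdout and, on a non-zero exit status, raises
CalledProcessError with the captured text available as exc.output. A
standalone sketch (the failing command here is a placeholder)::

    from subprocess import check_output, CalledProcessError

    try:
        # stdout is captured and returned; stderr still reaches the terminal
        out = check_output(['sh', '-c', 'echo building; exit 1'])
        print('succeeded with output %r' % out)
    except CalledProcessError as exc:
        print('%r failed with exit code %d' % (exc.cmd, exc.returncode))
        print('captured output: %r' % exc.output)
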
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/showstack.py	Tue Oct 20 15:59:10 2015 -0500
@@ -0,0 +1,17 @@
+# showstack.py - extension to dump a Python stack trace on signal
+#
+# binds to both SIGQUIT (Ctrl-\) and SIGINFO (Ctrl-T on BSDs)
+
+import sys, signal, traceback
+
+def sigshow(*args):
+    sys.stderr.write("\n")
+    traceback.print_stack(args[1], limit=10, file=sys.stderr)
+    sys.stderr.write("----\n")
+
+def extsetup(ui):
+    signal.signal(signal.SIGQUIT, sigshow)
+    try:
+        signal.signal(signal.SIGINFO, sigshow)
+    except AttributeError:
+        pass
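
The new contrib extension installs a signal handler that writes a Python stack
trace to stderr, so a seemingly hung hg process can be inspected without
killing it: press Ctrl-\ (SIGQUIT) or Ctrl-T (SIGINFO, on BSDs) in its
terminal, or signal it by pid from elsewhere. A standalone sketch of the
latter (the pid is a placeholder, and the target process must have showstack
enabled, otherwise SIGQUIT terminates it)::

    import os, signal

    hg_pid = 12345                    # pid of the hg process to inspect
    os.kill(hg_pid, signal.SIGQUIT)   # same effect as Ctrl-\ in its tty
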
--- a/contrib/simplemerge	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/simplemerge	Tue Oct 20 15:59:10 2015 -0500
@@ -5,7 +5,7 @@
 
 import sys
 from mercurial.i18n import _
-from mercurial import simplemerge, fancyopts, util, ui
+from mercurial import error, simplemerge, fancyopts, util, ui
 
 options = [('L', 'label', [], _('labels to use on conflict markers')),
            ('a', 'text', None, _('treat all files as text')),
@@ -59,7 +59,7 @@
     sys.stdout.write("%s: %s\n" % (sys.argv[0], e))
     showhelp()
     sys.exit(1)
-except util.Abort, e:
+except error.Abort, e:
     sys.stderr.write("abort: %s\n" % e)
     sys.exit(255)
 except KeyboardInterrupt:
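
This hunk is part of a tree-wide migration from util.Abort to error.Abort
(util.Abort has long been an alias for the class defined in mercurial.error),
enforced by the new check-code rule above. A standalone sketch of the raise
and catch convention, including the hint= keyword used by several hunks later
in this changeset (the require_clean helper is a placeholder)::

    import sys
    from mercurial import error

    def require_clean(is_clean):
        if not is_clean:
            raise error.Abort('uncommitted changes found',
                              hint='commit or revert them first')

    try:
        require_clean(False)
    except error.Abort as e:
        # command-line front ends conventionally print "abort:" and exit 255
        sys.stderr.write('abort: %s\n' % e)
        sys.exit(255)
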
--- a/contrib/synthrepo.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/synthrepo.py	Tue Oct 20 15:59:10 2015 -0500
@@ -37,7 +37,7 @@
 '''
 
 import bisect, collections, itertools, json, os, random, time, sys
-from mercurial import cmdutil, context, patch, scmutil, util, hg
+from mercurial import cmdutil, context, patch, scmutil, util, hg, error
 from mercurial.i18n import _
 from mercurial.node import nullrev, nullid, short
 
@@ -254,7 +254,7 @@
     try:
         fp = hg.openpath(ui, descpath)
     except Exception as err:
-        raise util.Abort('%s: %s' % (descpath, err[0].strerror))
+        raise error.Abort('%s: %s' % (descpath, err[0].strerror))
     desc = json.load(fp)
     fp.close()
 
@@ -286,7 +286,7 @@
     try:
         fp = open(dictfile, 'rU')
     except IOError as err:
-        raise util.Abort('%s: %s' % (dictfile, err.strerror))
+        raise error.Abort('%s: %s' % (dictfile, err.strerror))
     words = fp.read().splitlines()
     fp.close()
 
--- a/contrib/vagrant/Vagrantfile	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/vagrant/Vagrantfile	Tue Oct 20 15:59:10 2015 -0500
@@ -1,9 +1,8 @@
 # -*- mode: ruby -*-
 
 Vagrant.configure('2') do |config|
-  # Debian 7.4 32-bit i386 without configuration management software
-  config.vm.box = "puppetlabs/debian-7.4-32-nocm"
-  #config.vm.box = "pnd/debian-wheezy32-basebox"
+  # Debian 8.1 x86_64 without configuration management software
+  config.vm.box = "debian/jessie64"
   config.vm.hostname = "tests"
 
   config.vm.define "tests" do |conf|
--- a/contrib/vim/hgcommand.vim	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/vim/hgcommand.vim	Tue Oct 20 15:59:10 2015 -0500
@@ -1226,7 +1226,7 @@
 
 Author:  Mathieu Clabaut <mathieu.clabaut@gmail.com>
 Credits:  Bob Hiestand <bob.hiestand@gmail.com>
-Mercurial: http://mercurial.selenic.com/
+Mercurial: https://mercurial-scm.org/
    Mercurial (noted Hg) is a fast, lightweight Source Control Management
    system designed for efficient handling of very large distributed projects.
 
--- a/contrib/vim/patchreview.txt	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/vim/patchreview.txt	Tue Oct 20 15:59:10 2015 -0500
@@ -30,7 +30,7 @@
 software development projects. This plugin provides that missing
 functionality.
 
-It also improves on |:diffpatch|'s behaviour of creating the patched files in
+It also improves on |:diffpatch|'s behavior of creating the patched files in
 the same directory as original file which can lead to project workspace
 pollution.
 
--- a/contrib/win32/ReadMe.html	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/win32/ReadMe.html	Tue Oct 20 15:59:10 2015 -0500
@@ -49,7 +49,7 @@
 
     <p>
       For documentation, please visit the <a
-      href="http://mercurial.selenic.com/">Mercurial web site</a>.
+      href="https://mercurial-scm.org/">Mercurial web site</a>.
       You can also download a free book, <a
       href="http://hgbook.red-bean.com/">Mercurial: The Definitive
       Guide</a>.
@@ -100,7 +100,7 @@
       By default, Mercurial will use the merge program defined by the
       <tt>HGMERGE</tt> environment variable, or uses the one defined
       in the <tt>mercurial.ini</tt> file. (see <a
-      href="http://mercurial.selenic.com/wiki/MergeProgram">MergeProgram</a>
+      href="https://mercurial-scm.org/wiki/MergeProgram">MergeProgram</a>
       on the Mercurial Wiki for more information)
     </p>
 
@@ -108,9 +108,9 @@
 
     <p>
       Before you report any problems, please consult the <a
-      href="http://mercurial.selenic.com/">Mercurial web site</a>
+      href="https://mercurial-scm.org/">Mercurial web site</a>
       and see if your question is already in our list of <a
-      href="http://mercurial.selenic.com/wiki/FAQ">Frequently
+      href="https://mercurial-scm.org/wiki/FAQ">Frequently
       Answered Questions</a> (the "FAQ").
     </p>
 
--- a/contrib/win32/mercurial.iss	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/win32/mercurial.iss	Tue Oct 20 15:59:10 2015 -0500
@@ -36,9 +36,9 @@
 LicenseFile=COPYING
 ShowLanguageDialog=yes
 AppPublisher=Matt Mackall and others
-AppPublisherURL=http://mercurial.selenic.com/
-AppSupportURL=http://mercurial.selenic.com/
-AppUpdatesURL=http://mercurial.selenic.com/
+AppPublisherURL=https://mercurial-scm.org/
+AppSupportURL=https://mercurial-scm.org/
+AppUpdatesURL=https://mercurial-scm.org/
 AppID={{4B95A5F1-EF59-4B08-BED8-C891C46121B3}
 AppContact=mercurial@selenic.com
 DefaultDirName={pf}\Mercurial
@@ -90,7 +90,7 @@
 Source: COPYING; DestDir: {app}; DestName: Copying.txt
 
 [INI]
-Filename: {app}\Mercurial.url; Section: InternetShortcut; Key: URL; String: http://mercurial.selenic.com/
+Filename: {app}\Mercurial.url; Section: InternetShortcut; Key: URL; String: https://mercurial-scm.org/
 Filename: {app}\default.d\editor.rc; Section: ui; Key: editor; String: notepad
 
 [UninstallDelete]
--- a/contrib/win32/postinstall.txt	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/win32/postinstall.txt	Tue Oct 20 15:59:10 2015 -0500
@@ -6,4 +6,4 @@
 
 Also check the release notes at:
 
- http://mercurial.selenic.com/wiki/WhatsNew
+ https://mercurial-scm.org/wiki/WhatsNew
--- a/contrib/win32/win32-build.txt	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/win32/win32-build.txt	Tue Oct 20 15:59:10 2015 -0500
@@ -78,9 +78,6 @@
 
 Copy add_path.exe and cacert.pem files into the dist directory that just got created.
 
-If you are using Python up to version 2.5.4, copy mfc71.dll into the dist
-directory that just got created.
-
 If you are using Python 2.6 or later, or if you are using MSVC 2008 to compile
 mercurial, you must include the C runtime libraries in the installer. To do so,
 install the Visual C++ 2008 redistributable package. Then in your windows\winsxs
--- a/contrib/wix/mercurial.wxs	Thu Oct 08 23:24:38 2015 +0900
+++ b/contrib/wix/mercurial.wxs	Tue Oct 20 15:59:10 2015 -0500
@@ -44,10 +44,10 @@
 
     <!--Property Id='ARPCOMMENTS'>any comments</Property-->
     <Property Id='ARPCONTACT'>mercurial@selenic.com</Property>
-    <Property Id='ARPHELPLINK'>http://mercurial.selenic.com/wiki/</Property>
-    <Property Id='ARPURLINFOABOUT'>http://mercurial.selenic.com/about/</Property>
-    <Property Id='ARPURLUPDATEINFO'>http://mercurial.selenic.com/downloads/</Property>
-    <Property Id='ARPHELPTELEPHONE'>http://mercurial.selenic.com/wiki/Support</Property>
+    <Property Id='ARPHELPLINK'>https://mercurial-scm.org/wiki/</Property>
+    <Property Id='ARPURLINFOABOUT'>https://mercurial-scm.org/about/</Property>
+    <Property Id='ARPURLUPDATEINFO'>https://mercurial-scm.org/downloads/</Property>
+    <Property Id='ARPHELPTELEPHONE'>https://mercurial-scm.org/wiki/Support</Property>
     <Property Id='ARPPRODUCTICON'>hgIcon.ico</Property>
 
     <Property Id='INSTALLEDMERCURIALPRODUCTS' Secure='yes'></Property>
--- a/doc/check-seclevel.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/doc/check-seclevel.py	Tue Oct 20 15:59:10 2015 -0500
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# checkseclevel - checking section title levels in each online help documents
+# checkseclevel - checking section title levels in each online help document
 
 import sys, os
 import optparse
@@ -14,15 +14,7 @@
 from mercurial.help import helptable
 from mercurial import extensions
 from mercurial import minirst
-
-_verbose = False
-
-def verbose(msg):
-    if _verbose:
-        print msg
-
-def error(msg):
-    sys.stderr.write('%s\n' % msg)
+from mercurial import ui as uimod
 
 level2mark = ['"', '=', '-', '.', '#']
 reservedmarks = ['"']
@@ -37,12 +29,12 @@
 initlevel_ext = 1
 initlevel_ext_cmd = 3
 
-def showavailables(initlevel):
-    error('    available marks and order of them in this help: %s' %
-          (', '.join(['%r' % (m * 4) for m in level2mark[initlevel + 1:]])))
+def showavailables(ui, initlevel):
+    ui.warn(('    available marks and order of them in this help: %s\n') %
+            (', '.join(['%r' % (m * 4) for m in level2mark[initlevel + 1:]])))
 
-def checkseclevel(doc, name, initlevel):
-    verbose('checking "%s"' % name)
+def checkseclevel(ui, doc, name, initlevel):
+    ui.note(('checking "%s"\n') % name)
     blocks, pruned = minirst.parse(doc, 0, ['verbose'])
     errorcnt = 0
     curlevel = initlevel
@@ -52,66 +44,66 @@
         mark = block['underline']
         title = block['lines'][0]
         if (mark not in mark2level) or (mark2level[mark] <= initlevel):
-            error('invalid section mark %r for "%s" of %s' %
-                  (mark * 4, title, name))
-            showavailables(initlevel)
+            ui.warn(('invalid section mark %r for "%s" of %s\n') %
+                    (mark * 4, title, name))
+            showavailables(ui, initlevel)
             errorcnt += 1
             continue
         nextlevel = mark2level[mark]
         if curlevel < nextlevel and curlevel + 1 != nextlevel:
-            error('gap of section level at "%s" of %s' %
-                  (title, name))
-            showavailables(initlevel)
+            ui.warn(('gap of section level at "%s" of %s\n') %
+                    (title, name))
+            showavailables(ui, initlevel)
             errorcnt += 1
             continue
-        verbose('appropriate section level for "%s %s"' %
+        ui.note(('appropriate section level for "%s %s"\n') %
                 (mark * (nextlevel * 2), title))
         curlevel = nextlevel
 
     return errorcnt
 
-def checkcmdtable(cmdtable, namefmt, initlevel):
+def checkcmdtable(ui, cmdtable, namefmt, initlevel):
     errorcnt = 0
     for k, entry in cmdtable.items():
         name = k.split("|")[0].lstrip("^")
         if not entry[0].__doc__:
-            verbose('skip checking %s: no help document' %
+            ui.note(('skip checking %s: no help document\n') %
                     (namefmt % name))
             continue
-        errorcnt += checkseclevel(entry[0].__doc__,
+        errorcnt += checkseclevel(ui, entry[0].__doc__,
                                   namefmt % name,
                                   initlevel)
     return errorcnt
 
-def checkhghelps():
+def checkhghelps(ui):
     errorcnt = 0
     for names, sec, doc in helptable:
         if callable(doc):
-            doc = doc()
-        errorcnt += checkseclevel(doc,
+            doc = doc(ui)
+        errorcnt += checkseclevel(ui, doc,
                                   '%s help topic' % names[0],
                                   initlevel_topic)
 
-    errorcnt += checkcmdtable(table, '%s command', initlevel_cmd)
+    errorcnt += checkcmdtable(ui, table, '%s command', initlevel_cmd)
 
     for name in sorted(extensions.enabled().keys() +
                        extensions.disabled().keys()):
         mod = extensions.load(None, name, None)
         if not mod.__doc__:
-            verbose('skip checking %s extension: no help document' % name)
+            ui.note(('skip checking %s extension: no help document\n') % name)
             continue
-        errorcnt += checkseclevel(mod.__doc__,
+        errorcnt += checkseclevel(ui, mod.__doc__,
                                   '%s extension' % name,
                                   initlevel_ext)
 
         cmdtable = getattr(mod, 'cmdtable', None)
         if cmdtable:
-            errorcnt += checkcmdtable(cmdtable,
+            errorcnt += checkcmdtable(ui, cmdtable,
                                       '%s command of ' + name + ' extension',
                                       initlevel_ext_cmd)
     return errorcnt
 
-def checkfile(filename, initlevel):
+def checkfile(ui, filename, initlevel):
     if filename == '-':
         filename = 'stdin'
         doc = sys.stdin.read()
@@ -122,11 +114,11 @@
         finally:
             fp.close()
 
-    verbose('checking input from %s with initlevel %d' %
+    ui.note(('checking input from %s with initlevel %d\n') %
             (filename, initlevel))
-    return checkseclevel(doc, 'input from %s' % filename, initlevel)
+    return checkseclevel(ui, doc, 'input from %s' % filename, initlevel)
 
-if __name__ == "__main__":
+def main():
     optparser = optparse.OptionParser("""%prog [options]
 
 This checks all help documents of Mercurial (topics, commands,
@@ -159,11 +151,15 @@
 
     (options, args) = optparser.parse_args()
 
-    _verbose = options.verbose
+    ui = uimod.ui()
+    ui.setconfig('ui', 'verbose', options.verbose, '--verbose')
 
     if options.file:
-        if checkfile(options.file, options.initlevel):
+        if checkfile(ui, options.file, options.initlevel):
             sys.exit(1)
     else:
-        if checkhghelps():
+        if checkhghelps(ui):
             sys.exit(1)
+
+if __name__ == "__main__":
+    main()
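
The rewrite above drops the script's module-level verbose()/error() helpers in
favor of a mercurial ui object: ui.note() output appears only when verbosity
is switched on (which the setconfig('ui', 'verbose', ...) call does), while
ui.warn() always goes to stderr. A standalone sketch of that gating, using the
same ui API as the script (the messages are placeholders)::

    from mercurial import ui as uimod

    u = uimod.ui()
    u.note('hidden unless verbose is enabled\n')   # suppressed by default
    u.warn('warnings always reach stderr\n')       # always written

    u.setconfig('ui', 'verbose', True, 'example')
    u.note('printed now that verbose is on\n')
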
--- a/doc/gendoc.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/doc/gendoc.py	Tue Oct 20 15:59:10 2015 -0500
@@ -14,6 +14,7 @@
 from mercurial.i18n import gettext, _
 from mercurial.help import helptable, loaddoc
 from mercurial import extensions
+from mercurial import ui as uimod
 
 def get_desc(docstr):
     if not docstr:
@@ -137,7 +138,7 @@
         if sectionfunc:
             ui.write(sectionfunc(sec))
         if callable(doc):
-            doc = doc()
+            doc = doc(ui)
         ui.write(doc)
         ui.write("\n")
 
@@ -198,7 +199,8 @@
     if len(sys.argv) > 1:
         doc = sys.argv[1]
 
+    ui = uimod.ui()
     if doc == 'hg.1.gendoc':
-        showdoc(sys.stdout)
+        showdoc(ui)
     else:
-        showtopic(sys.stdout, sys.argv[1])
+        showtopic(ui, sys.argv[1])
--- a/doc/hgmanpage.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/doc/hgmanpage.py	Tue Oct 20 15:59:10 2015 -0500
@@ -18,11 +18,11 @@
  7 miscellaneous
  8 system administration
 
-Man pages are written *troff*, a text file formatting system.
+Man pages are written in *troff*, a text file formatting system.
 
 See http://www.tldp.org/HOWTO/Man-Page for a start.
 
-Man pages have no subsection only parts.
+Man pages have no subsections only parts.
 Standard parts
 
   NAME ,
@@ -317,7 +317,7 @@
                     self._cnt = 0
                 self._indent = 2
                 if style == 'arabic':
-                    # indentation depends on number of childrens
+                    # indentation depends on number of children
                     # and start value.
                     self._indent = len(str(len(node.children)))
                     self._indent += len(str(self._cnt)) + 1
@@ -427,7 +427,7 @@
         pass
 
     def visit_block_quote(self, node):
-        # BUG/HACK: indent always uses the _last_ indention,
+        # BUG/HACK: indent always uses the _last_ indentation,
         # thus we need two of them.
         self.indent(BLOCKQOUTE_INDENT)
         self.indent(0)
--- a/doc/style.css	Thu Oct 08 23:24:38 2015 +0900
+++ b/doc/style.css	Tue Oct 20 15:59:10 2015 -0500
@@ -1,8 +1,8 @@
 /*
- * Styles for man pages, which match with http://mercurial.selenic.com/
+ * Styles for man pages, which match with https://mercurial-scm.org/
  *
  * Color scheme & layout are borrowed from
- * http://mercurial.selenic.com/css/styles.css
+ * https://mercurial-scm.org/css/styles.css
  *
  * Some styles are from html4css1.css from Docutils, which is in the
  * public domain.
--- a/hgeditor	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgeditor	Tue Oct 20 15:59:10 2015 -0500
@@ -1,7 +1,7 @@
 #!/bin/sh
 #
 # This is an example of using HGEDITOR to create of diff to review the
-# changes while commiting.
+# changes while committing.
 
 # If you want to pass your favourite editor some other parameters
 # only for Mercurial, modify this:
--- a/hgext/acl.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/acl.py	Tue Oct 20 15:59:10 2015 -0500
@@ -192,7 +192,7 @@
 '''
 
 from mercurial.i18n import _
-from mercurial import util, match
+from mercurial import util, match, error
 import getpass, urllib
 
 # Note for extension authors: ONLY specify testedwith = 'internal' for
@@ -213,7 +213,7 @@
     try:
         return util.groupmembers(group)
     except KeyError:
-        raise util.Abort(_("group '%s' is undefined") % group)
+        raise error.Abort(_("group '%s' is undefined") % group)
 
 def _usermatch(ui, user, usersorgroups):
 
@@ -268,7 +268,7 @@
 
 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
     if hooktype not in ['pretxnchangegroup', 'pretxncommit']:
-        raise util.Abort(_('config error - hook type "%s" cannot stop '
+        raise error.Abort(_('config error - hook type "%s" cannot stop '
                            'incoming changesets nor commits') % hooktype)
     if (hooktype == 'pretxnchangegroup' and
         source not in ui.config('acl', 'sources', 'serve').split()):
@@ -301,11 +301,11 @@
         ctx = repo[rev]
         branch = ctx.branch()
         if denybranches and denybranches(branch):
-            raise util.Abort(_('acl: user "%s" denied on branch "%s"'
+            raise error.Abort(_('acl: user "%s" denied on branch "%s"'
                                ' (changeset "%s")')
                                % (user, branch, ctx))
         if allowbranches and not allowbranches(branch):
-            raise util.Abort(_('acl: user "%s" not allowed on branch "%s"'
+            raise error.Abort(_('acl: user "%s" not allowed on branch "%s"'
                                ' (changeset "%s")')
                                % (user, branch, ctx))
         ui.debug('acl: branch access granted: "%s" on branch "%s"\n'
@@ -313,9 +313,9 @@
 
         for f in ctx.files():
             if deny and deny(f):
-                raise util.Abort(_('acl: user "%s" denied on "%s"'
+                raise error.Abort(_('acl: user "%s" denied on "%s"'
                 ' (changeset "%s")') % (user, f, ctx))
             if allow and not allow(f):
-                raise util.Abort(_('acl: user "%s" not allowed on "%s"'
+                raise error.Abort(_('acl: user "%s" not allowed on "%s"'
                 ' (changeset "%s")') % (user, f, ctx))
         ui.debug('acl: path access granted: "%s"\n' % ctx)
--- a/hgext/blackbox.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/blackbox.py	Tue Oct 20 15:59:10 2015 -0500
@@ -107,9 +107,11 @@
             if blackbox:
                 date = util.datestr(None, '%Y/%m/%d %H:%M:%S')
                 user = util.getuser()
+                pid = str(os.getpid())
                 formattedmsg = msg[0] % msg[1:]
                 try:
-                    blackbox.write('%s %s> %s' % (date, user, formattedmsg))
+                    blackbox.write('%s %s (%s)> %s' %
+                                   (date, user, pid, formattedmsg))
                 except IOError as err:
                     self.debug('warning: cannot write to blackbox.log: %s\n' %
                                err.strerror)
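
With the process id added, every blackbox.log entry now records date, user and
pid ahead of the logged message. A standalone sketch of the resulting line
format (the values are placeholders)::

    date, user, pid = '2015/10/20 15:59:10', 'alice', '12345'
    formattedmsg = 'run hg status\n'
    print('%s %s (%s)> %s' % (date, user, pid, formattedmsg))
    # 2015/10/20 15:59:10 alice (12345)> run hg status
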
--- a/hgext/bugzilla.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/bugzilla.py	Tue Oct 20 15:59:10 2015 -0500
@@ -279,7 +279,7 @@
 
 from mercurial.i18n import _
 from mercurial.node import short
-from mercurial import cmdutil, mail, util
+from mercurial import cmdutil, mail, util, error
 import re, time, urlparse, xmlrpclib
 
 # Note for extension authors: ONLY specify testedwith = 'internal' for
@@ -358,7 +358,7 @@
             import MySQLdb as mysql
             bzmysql._MySQLdb = mysql
         except ImportError as err:
-            raise util.Abort(_('python mysql support not available: %s') % err)
+            raise error.Abort(_('python mysql support not available: %s') % err)
 
         bzaccess.__init__(self, ui)
 
@@ -392,7 +392,7 @@
         self.run('select fieldid from fielddefs where name = "longdesc"')
         ids = self.cursor.fetchall()
         if len(ids) != 1:
-            raise util.Abort(_('unknown database schema'))
+            raise error.Abort(_('unknown database schema'))
         return ids[0][0]
 
     def filter_real_bug_ids(self, bugs):
@@ -437,7 +437,7 @@
             ret = fp.close()
             if ret:
                 self.ui.warn(out)
-                raise util.Abort(_('bugzilla notify command %s') %
+                raise error.Abort(_('bugzilla notify command %s') %
                                  util.explainexit(ret)[0])
         self.ui.status(_('done\n'))
 
@@ -470,12 +470,12 @@
             try:
                 defaultuser = self.ui.config('bugzilla', 'bzuser')
                 if not defaultuser:
-                    raise util.Abort(_('cannot find bugzilla user id for %s') %
+                    raise error.Abort(_('cannot find bugzilla user id for %s') %
                                      user)
                 userid = self.get_user_id(defaultuser)
                 user = defaultuser
             except KeyError:
-                raise util.Abort(_('cannot find bugzilla user id for %s or %s')
+                raise error.Abort(_('cannot find bugzilla user id for %s or %s')
                                  % (user, defaultuser))
         return (user, userid)
 
@@ -517,7 +517,7 @@
         self.run('select id from fielddefs where name = "longdesc"')
         ids = self.cursor.fetchall()
         if len(ids) != 1:
-            raise util.Abort(_('unknown database schema'))
+            raise error.Abort(_('unknown database schema'))
         return ids[0][0]
 
 # Bugzilla via XMLRPC interface.
@@ -705,7 +705,7 @@
 
         self.bzemail = self.ui.config('bugzilla', 'bzemail')
         if not self.bzemail:
-            raise util.Abort(_("configuration 'bzemail' missing"))
+            raise error.Abort(_("configuration 'bzemail' missing"))
         mail.validateconfig(self.ui)
 
     def makecommandline(self, fieldname, value):
@@ -735,8 +735,8 @@
             matches = self.bzproxy.User.get({'match': [user],
                                              'token': self.bztoken})
             if not matches['users']:
-                raise util.Abort(_("default bugzilla user %s email not found") %
-                                 user)
+                raise error.Abort(_("default bugzilla user %s email not found")
+                                  % user)
         user = matches['users'][0]['email']
         commands.append(self.makecommandline("id", bugid))
 
@@ -789,7 +789,7 @@
         try:
             bzclass = bugzilla._versions[bzversion]
         except KeyError:
-            raise util.Abort(_('bugzilla version %s not supported') %
+            raise error.Abort(_('bugzilla version %s not supported') %
                              bzversion)
         self.bzdriver = bzclass(self.ui)
 
@@ -900,7 +900,7 @@
     bugzilla bug id. only add a comment once per bug, so same change
     seen multiple times does not fill bug with duplicate data.'''
     if node is None:
-        raise util.Abort(_('hook type %s does not pass a changeset id') %
+        raise error.Abort(_('hook type %s does not pass a changeset id') %
                          hooktype)
     try:
         bz = bugzilla(ui, repo)
@@ -911,4 +911,4 @@
                 bz.update(bug, bugs[bug], ctx)
             bz.notify(bugs, util.email(ctx.user()))
     except Exception as e:
-        raise util.Abort(_('Bugzilla error: %s') % e)
+        raise error.Abort(_('Bugzilla error: %s') % e)
--- a/hgext/censor.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/censor.py	Tue Oct 20 15:59:10 2015 -0500
@@ -13,7 +13,7 @@
 
 Typical uses for censor are due to security or legal requirements, including::
 
- * Passwords, private keys, crytographic material
+ * Passwords, private keys, cryptographic material
  * Licensed data/code/libraries for which the license has expired
  * Personally Identifiable Information or other private data
 
@@ -43,47 +43,47 @@
     _('-r REV [-t TEXT] [FILE]'))
 def censor(ui, repo, path, rev='', tombstone='', **opts):
     if not path:
-        raise util.Abort(_('must specify file path to censor'))
+        raise error.Abort(_('must specify file path to censor'))
     if not rev:
-        raise util.Abort(_('must specify revision to censor'))
+        raise error.Abort(_('must specify revision to censor'))
 
     wctx = repo[None]
 
     m = scmutil.match(wctx, (path,))
     if m.anypats() or len(m.files()) != 1:
-        raise util.Abort(_('can only specify an explicit filename'))
+        raise error.Abort(_('can only specify an explicit filename'))
     path = m.files()[0]
     flog = repo.file(path)
     if not len(flog):
-        raise util.Abort(_('cannot censor file with no history'))
+        raise error.Abort(_('cannot censor file with no history'))
 
     rev = scmutil.revsingle(repo, rev, rev).rev()
     try:
         ctx = repo[rev]
     except KeyError:
-        raise util.Abort(_('invalid revision identifier %s') % rev)
+        raise error.Abort(_('invalid revision identifier %s') % rev)
 
     try:
         fctx = ctx.filectx(path)
     except error.LookupError:
-        raise util.Abort(_('file does not exist at revision %s') % rev)
+        raise error.Abort(_('file does not exist at revision %s') % rev)
 
     fnode = fctx.filenode()
     headctxs = [repo[c] for c in repo.heads()]
     heads = [c for c in headctxs if path in c and c.filenode(path) == fnode]
     if heads:
         headlist = ', '.join([short(c.node()) for c in heads])
-        raise util.Abort(_('cannot censor file in heads (%s)') % headlist,
+        raise error.Abort(_('cannot censor file in heads (%s)') % headlist,
             hint=_('clean/delete and commit first'))
 
     wp = wctx.parents()
     if ctx.node() in [p.node() for p in wp]:
-        raise util.Abort(_('cannot censor working directory'),
+        raise error.Abort(_('cannot censor working directory'),
             hint=_('clean/delete/update first'))
 
     flogv = flog.version & 0xFFFF
     if flogv != revlog.REVLOGNG:
-        raise util.Abort(
+        raise error.Abort(
             _('censor does not support revlog version %d') % (flogv,))
 
     tombstone = filelog.packmeta({"censored": tombstone}, "")
@@ -91,7 +91,7 @@
     crev = fctx.filerev()
 
     if len(tombstone) > flog.rawsize(crev):
-        raise util.Abort(_(
+        raise error.Abort(_(
             'censor tombstone must be no longer than censored data'))
 
     # Using two files instead of one makes it easy to rewrite entry-by-entry
--- a/hgext/churn.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/churn.py	Tue Oct 20 15:59:10 2015 -0500
@@ -9,7 +9,7 @@
 '''command to display statistics about repository history'''
 
 from mercurial.i18n import _
-from mercurial import patch, cmdutil, scmutil, util, commands
+from mercurial import patch, cmdutil, scmutil, util, commands, error
 from mercurial import encoding
 import os
 import time, datetime
@@ -27,7 +27,7 @@
         t = cmdutil.changeset_templater(ui, repo, False, None, tmpl,
                                         None, False)
     except SyntaxError as inst:
-        raise util.Abort(inst.args[0])
+        raise error.Abort(inst.args[0])
     return t
 
 def changedlines(ui, repo, ctx1, ctx2, fns):
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/clonebundles.py	Tue Oct 20 15:59:10 2015 -0500
@@ -0,0 +1,266 @@
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+"""advertise pre-generated bundles to seed clones (experimental)
+
+"clonebundles" is a server-side extension used to advertise the existence
+of pre-generated, externally hosted bundle files to clients that are
+cloning so that cloning can be faster, more reliable, and require less
+resources on the server.
+
+Cloning can be a CPU and I/O intensive operation on servers. Traditionally,
+the server, in response to a client's request to clone, dynamically generates
+a bundle containing the entire repository content and sends it to the client.
+There is no caching on the server and the server will have to redundantly
+generate the same outgoing bundle in response to each clone request. For
+servers with large repositories or with high clone volume, the load from
+clones can make scaling the server challenging and costly.
+
+This extension provides server operators the ability to offload potentially
+expensive clone load to an external service. Here's how it works.
+
+1. A server operator establishes a mechanism for making bundle files available
+   on a hosting service where Mercurial clients can fetch them.
+2. A manifest file listing available bundle URLs and some optional metadata
+   is added to the Mercurial repository on the server.
+3. A client initiates a clone against a clone bundles aware server.
+4. The client sees the server is advertising clone bundles and fetches the
+   manifest listing available bundles.
+5. The client filters and sorts the available bundles based on what it
+   supports and prefers.
+6. The client downloads and applies an available bundle from the
+   server-specified URL.
+7. The client reconnects to the original server and performs the equivalent
+   of :hg:`pull` to retrieve all repository data not in the bundle. (The
+   repository could have been updated between when the bundle was created
+   and when the client started the clone.)
+
+Instead of the server generating full repository bundles for every clone
+request, it generates full bundles once and they are subsequently reused to
+bootstrap new clones. The server may still transfer data at clone time.
+However, this is only data that has been added/changed since the bundle was
+created. For large, established repositories, this can reduce server load for
+clones to less than 1% of original.
+
+To work, this extension requires the following of server operators:
+
+* Generating bundle files of repository content (typically periodically,
+  such as once per day).
+* A file server that clients have network access to and that Python knows
+  how to talk to through its normal URL handling facility (typically a
+  HTTP server).
+* A process for keeping the bundles manifest in sync with available bundle
+  files.
+
+Strictly speaking, using a static file hosting server isn't required: a server
+operator could use a dynamic service for retrieving bundle data. However,
+static file hosting services are simple and scalable and should be sufficient
+for most needs.
+
+Bundle files can be generated with the :hg:`bundle` command. Typically
+:hg:`bundle --all` is used to produce a bundle of the entire repository.
+
+:hg:`debugcreatestreamclonebundle` can be used to produce a special
+*streaming clone bundle*. These are bundle files that are extremely efficient
+to produce and consume (read: fast). However, they are larger than
+traditional bundle formats and require that clients support the exact set
+of repository data store formats in use by the repository that created them.
+Typically, a newer server can serve data that is compatible with older clients.
+However, *streaming clone bundles* don't have this guarantee. **Server
+operators need to be aware that newer versions of Mercurial may produce
+streaming clone bundles incompatible with older Mercurial versions.**
+
+The list of requirements printed by :hg:`debugcreatestreamclonebundle` should
+be specified in the ``requirements`` parameter of the *bundle specification
+string* for the ``BUNDLESPEC`` manifest property described below. e.g.
+``BUNDLESPEC=none-packed1;requirements%3Drevlogv1``.
+
+A server operator is responsible for creating a ``.hg/clonebundles.manifest``
+file containing the list of available bundle files suitable for seeding
+clones. If this file does not exist, the repository will not advertise the
+existence of clone bundles when clients connect.
+
+The manifest file contains a newline (\n) delimited list of entries.
+
+Each line in this file defines an available bundle. Lines have the format:
+
+    <URL> [<key>=<value>[ <key>=<value>]]
+
+That is, a URL followed by an optional, space-delimited list of key=value
+pairs describing additional properties of this bundle. Both keys and values
+are URI encoded.
+
+Keys in UPPERCASE are reserved for use by Mercurial and are defined below.
+All non-uppercase keys can be used by site installations. An example use
+for custom properties is to use the *datacenter* attribute to define which
+data center a file is hosted in. Clients could then prefer a server in the
+data center closest to them.
+
+The following reserved keys are currently defined:
+
+BUNDLESPEC
+   A "bundle specification" string that describes the type of the bundle.
+
+   These are string values that are accepted by the "--type" argument of
+   :hg:`bundle`.
+
+   The values are parsed in strict mode, which means they must be of the
+   "<compression>-<type>" form. See
+   mercurial.exchange.parsebundlespec() for more details.
+
+   Clients will automatically filter out specifications that are unknown or
+   unsupported so they won't attempt to download something that likely won't
+   apply.
+
+   The actual value doesn't impact client behavior beyond filtering:
+   clients will still sniff the bundle type from the header of downloaded
+   files.
+
+   **Use of this key is highly recommended**, as it allows clients to
+   easily skip unsupported bundles.
+
+REQUIRESNI
+   Whether Server Name Indication (SNI) is required to connect to the URL.
+   SNI allows servers to use multiple certificates on the same IP. It is
+   somewhat common in CDNs and other hosting providers. Older Python
+   versions do not support SNI. Defining this attribute enables clients
+   with older Python versions to filter this entry without experiencing
+   an opaque SSL failure at connection time.
+
+   If this is defined, it is important to advertise a non-SNI fallback
+   URL or clients running old Python releases may not be able to clone
+   with the clonebundles facility.
+
+   Value should be "true".
+
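+For example, a manifest advertising two bundles might look like this (the URLs
+and the custom ``datacenter`` attribute are illustrative):
+
+    https://cdn.example.com/full.hg BUNDLESPEC=gzip-v1 REQUIRESNI=true
+    https://mirror.example.com/full.hg BUNDLESPEC=gzip-v1 datacenter=ams
+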
+Manifests can contain multiple entries. Assuming metadata is defined, clients
+will filter entries from the manifest that they don't support. The remaining
+entries are optionally sorted by client preferences
+(``experimental.clonebundleprefers`` config option). The client then attempts
+to fetch the bundle at the first URL in the remaining list.
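+
+As a rough sketch of that selection logic (this is not the actual client
+implementation; the function name and the ``supportedspecs``/``prefers``
+arguments are hypothetical), a client could process a fetched manifest
+along these lines:
+
+    import urllib  # Python 2, matching this codebase; illustrative only
+
+    def selectbundleurl(manifest, supportedspecs, prefers):
+        # Parse "URL key=value ..." manifest lines into (url, attrs) pairs.
+        entries = []
+        for line in manifest.splitlines():
+            if not line.strip():
+                continue
+            fields = line.split()
+            attrs = {}
+            for keyvalue in fields[1:]:
+                key, value = keyvalue.split('=', 1)
+                attrs[urllib.unquote(key)] = urllib.unquote(value)
+            entries.append((fields[0], attrs))
+        # Drop entries whose advertised BUNDLESPEC is known to be unsupported.
+        entries = [e for e in entries
+                   if 'BUNDLESPEC' not in e[1]
+                   or e[1]['BUNDLESPEC'] in supportedspecs]
+        # Stable sort: entries matching an earlier preference come first.
+        def score(entry):
+            for i, (key, value) in enumerate(prefers):
+                if entry[1].get(key) == value:
+                    return i
+            return len(prefers)
+        entries.sort(key=score)
+        return entries[0][0] if entries else None
+
+    # e.g. selectbundleurl(manifesttext, set(['gzip-v1']),
+    #                      [('BUNDLESPEC', 'gzip-v1')])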
+
+**Errors when downloading a bundle will fail the entire clone operation:
+clients do not automatically fall back to a traditional clone.** The reason
+for this is that if a server is using clone bundles, it is probably doing so
+because the feature is necessary to help it scale. In other words, there
+is an assumption that clone load will be offloaded to another service and
+that the Mercurial server isn't responsible for serving this clone load.
+If that other service experiences issues and clients fall back to the
+original Mercurial server en masse, the unexpected clone load could overwhelm
+the server and effectively take it offline. Not having clients automatically
+fall back to cloning from the original server mitigates this scenario.
+
+Because there is no automatic Mercurial server fallback on failure of the
+bundle hosting service, it is important for server operators to view the bundle
+hosting service as an extension of the Mercurial server in terms of
+availability and service level agreements: if the bundle hosting service goes
+down, so does the ability for clients to clone. Note: clients will see a
+message informing them how to bypass the clone bundles facility when a failure
+occurs, so server operators should prepare for some users to follow these
+instructions, driving extra load back to the original Mercurial server when
+the bundle hosting service fails.
+
+The following config options influence the behavior of the clone bundles
+feature:
+
+ui.clonebundleadvertise
+   Whether the server advertises the existence of the clone bundles feature
+   to compatible clients that aren't using it.
+
+   When this is enabled (the default), a server will send a message to
+   compatible clients performing a traditional clone informing them of the
+   available clone bundles feature. Compatible clients are those that support
+   bundle2 and are advertising support for the clone bundles feature.
+
+ui.clonebundlefallback
+   Whether to automatically fall back to a traditional clone in case of
+   clone bundles failure. Defaults to false for reasons described above.
+
+experimental.clonebundles
+   Whether the clone bundles feature is enabled on clients. Defaults to true.
+
+experimental.clonebundleprefers
+   List of "key=value" properties the client prefers in bundles. Downloaded
+   bundle manifests will be sorted by the preferences in this list. e.g.
+   the value "BUNDLESPEC=gzip-v1, BUNDLESPEC=bzip2-v1" will prefer a gzipped
+   version 1 bundle over a bzip2 version 1 bundle.
+
+   If not defined, the order in the manifest will be used and the first
+   available bundle will be downloaded.
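+
+A client opting in to the feature might, for example, add something like the
+following to its configuration file (values illustrative):
+
+    [experimental]
+    clonebundles = true
+    clonebundleprefers = BUNDLESPEC=gzip-v1, BUNDLESPEC=bzip2-v1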
+"""
+
+from mercurial.i18n import _
+from mercurial.node import nullid
+from mercurial import (
+    exchange,
+    extensions,
+    wireproto,
+)
+
+testedwith = 'internal'
+
+def capabilities(orig, repo, proto):
+    caps = orig(repo, proto)
+
+    # Only advertise if a manifest exists. This does add some I/O to requests.
+    # But this should be cheaper than a wasted network round trip due to a
+    # missing file.
+    if repo.opener.exists('clonebundles.manifest'):
+        caps.append('clonebundles')
+
+    return caps
+
+@wireproto.wireprotocommand('clonebundles', '')
+def bundles(repo, proto):
+    """Server command for returning info for available bundles to seed clones.
+
+    Clients will parse this response and determine what bundle to fetch.
+
+    Other extensions may wrap this command to filter or dynamically emit
+    data depending on the request. e.g. you could advertise URLs for
+    the closest data center given the client's IP address.
+    """
+    return repo.opener.tryread('clonebundles.manifest')
+
+@exchange.getbundle2partsgenerator('clonebundlesadvertise', 0)
+def advertiseclonebundlespart(bundler, repo, source, bundlecaps=None,
+                              b2caps=None, heads=None, common=None,
+                              cbattempted=None, **kwargs):
+    """Inserts an output part to advertise clone bundles availability."""
+    # Allow server operators to disable this behavior.
+    # experimental config: ui.clonebundleadvertise
+    if not repo.ui.configbool('ui', 'clonebundleadvertise', True):
+        return
+
+    # Only advertise if a manifest is present.
+    if not repo.opener.exists('clonebundles.manifest'):
+        return
+
+    # And when changegroup data is requested.
+    if not kwargs.get('cg', True):
+        return
+
+    # And when the client supports clone bundles.
+    if cbattempted is None:
+        return
+
+    # And when the client didn't attempt a clone bundle as part of this pull.
+    if cbattempted:
+        return
+
+    # And when a full clone is requested.
+    # Note: client should not send "cbattempted" for regular pulls. This check
+    # is defense in depth.
+    if common and common != [nullid]:
+        return
+
+    msg = _('this server supports the experimental "clone bundles" feature '
+            'that should enable faster and more reliable cloning\n'
+            'help test it by setting the "experimental.clonebundles" config '
+            'flag to "true"')
+
+    bundler.newpart('output', data=msg)
+
+def extsetup(ui):
+    extensions.wrapfunction(wireproto, '_capabilities', capabilities)
--- a/hgext/convert/__init__.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/convert/__init__.py	Tue Oct 20 15:59:10 2015 -0500
@@ -25,7 +25,7 @@
 
 @command('convert',
     [('', 'authors', '',
-      _('username mapping filename (DEPRECATED, use --authormap instead)'),
+      _('username mapping filename (DEPRECATED) (use --authormap instead)'),
       _('FILE')),
     ('s', 'source-type', '', _('source repository type'), _('TYPE')),
     ('d', 'dest-type', '', _('destination repository type'), _('TYPE')),
@@ -316,6 +316,9 @@
         ``convert.git.remoteprefix`` as a prefix followed by a /. The default
         is 'remote'.
 
+    :convert.git.skipsubmodules: does not convert root level .gitmodules files
+        or files with 160000 mode indicating a submodule. Default is False.
+
     Perforce Source
     ###############
 
--- a/hgext/convert/bzr.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/convert/bzr.py	Tue Oct 20 15:59:10 2015 -0500
@@ -9,7 +9,7 @@
 # it cannot access 'bar' repositories, but they were never used very much
 
 import os
-from mercurial import demandimport
+from mercurial import demandimport, error
 # these do not work with demandimport, blacklist
 demandimport.ignore.extend([
         'bzrlib.transactions',
@@ -18,7 +18,7 @@
     ])
 
 from mercurial.i18n import _
-from mercurial import util
+from mercurial import error
 from common import NoRepo, commit, converter_source
 
 try:
@@ -108,7 +108,8 @@
                     pass
                 revid = info.rev_id
             if revid is None:
-                raise util.Abort(_('%s is not a valid revision') % self.revs[0])
+                raise error.Abort(_('%s is not a valid revision')
+                                  % self.revs[0])
             heads = [revid]
         # Empty repositories return 'null:', which cannot be retrieved
         heads = [h for h in heads if h != 'null:']
@@ -127,7 +128,7 @@
         if kind == 'symlink':
             target = revtree.get_symlink_target(fileid)
             if target is None:
-                raise util.Abort(_('%s.%s symlink has no target')
+                raise error.Abort(_('%s.%s symlink has no target')
                                  % (name, rev))
             return target, mode
         else:
@@ -136,7 +137,7 @@
 
     def getchanges(self, version, full):
         if full:
-            raise util.Abort(_("convert from cvs do not support --full"))
+            raise error.Abort(_("convert from cvs does not support --full"))
         self._modecache = {}
         self._revtree = self.sourcerepo.revision_tree(version)
         # get the parentids from the cache
--- a/hgext/convert/common.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/convert/common.py	Tue Oct 20 15:59:10 2015 -0500
@@ -7,7 +7,7 @@
 
 import base64, errno, subprocess, os, datetime, re
 import cPickle as pickle
-from mercurial import phases, util
+from mercurial import phases, util, error
 from mercurial.i18n import _
 
 propertycache = util.propertycache
@@ -32,7 +32,7 @@
     name = name or exe
     if not util.findexe(exe):
         if abort:
-            exc = util.Abort
+            exc = error.Abort
         else:
             exc = MissingTool
         raise exc(_('cannot find required "%s" tool') % name)
@@ -73,7 +73,7 @@
             such format for their revision numbering
         """
         if not re.match(r'[0-9a-fA-F]{40,40}$', revstr):
-            raise util.Abort(_('%s entry %s is not a valid revision'
+            raise error.Abort(_('%s entry %s is not a valid revision'
                                ' identifier') % (mapname, revstr))
 
     def before(self):
@@ -82,6 +82,13 @@
     def after(self):
         pass
 
+    def targetfilebelongstosource(self, targetfilename):
+        """Returns true if the given targetfile belongs to the source repo. This
+        is useful when only a subdirectory of the target belongs to the source
+        repo."""
+        # For normal full repo converts, this is always True.
+        return True
+
     def setrevmap(self, revmap):
         """set the map of already-converted revisions"""
         pass
@@ -362,7 +369,7 @@
                 self.ui.warn(_('%s error:\n') % self.command)
                 self.ui.warn(output)
             msg = util.explainexit(status)[0]
-            raise util.Abort('%s %s' % (self.command, msg))
+            raise error.Abort('%s %s' % (self.command, msg))
 
     def run0(self, cmd, *args, **kwargs):
         output, status = self.run(cmd, *args, **kwargs)
@@ -439,7 +446,7 @@
             try:
                 key, value = line.rsplit(' ', 1)
             except ValueError:
-                raise util.Abort(
+                raise error.Abort(
                     _('syntax error in %s(%d): key/value pair expected')
                     % (self.path, i + 1))
             if key not in self:
@@ -452,7 +459,7 @@
             try:
                 self.fp = open(self.path, 'a')
             except IOError as err:
-                raise util.Abort(_('could not open map file %r: %s') %
+                raise error.Abort(_('could not open map file %r: %s') %
                                  (self.path, err.strerror))
         self.fp.write('%s %s\n' % (key, value))
         self.fp.flush()
--- a/hgext/convert/convcmd.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/convert/convcmd.py	Tue Oct 20 15:59:10 2015 -0500
@@ -18,7 +18,7 @@
 import filemap
 
 import os, shutil, shlex
-from mercurial import hg, util, encoding
+from mercurial import hg, util, encoding, error
 from mercurial.i18n import _
 
 orig_encoding = 'ascii'
@@ -82,7 +82,7 @@
 def convertsource(ui, path, type, revs):
     exceptions = []
     if type and type not in [s[0] for s in source_converters]:
-        raise util.Abort(_('%s: invalid source repository type') % type)
+        raise error.Abort(_('%s: invalid source repository type') % type)
     for name, source, sortmode in source_converters:
         try:
             if not type or name == type:
@@ -92,11 +92,11 @@
     if not ui.quiet:
         for inst in exceptions:
             ui.write("%s\n" % inst)
-    raise util.Abort(_('%s: missing or unsupported repository') % path)
+    raise error.Abort(_('%s: missing or unsupported repository') % path)
 
 def convertsink(ui, path, type):
     if type and type not in [s[0] for s in sink_converters]:
-        raise util.Abort(_('%s: invalid destination repository type') % type)
+        raise error.Abort(_('%s: invalid destination repository type') % type)
     for name, sink in sink_converters:
         try:
             if not type or name == type:
@@ -104,8 +104,8 @@
         except NoRepo as inst:
             ui.note(_("convert: %s\n") % inst)
         except MissingTool as inst:
-            raise util.Abort('%s\n' % inst)
-    raise util.Abort(_('%s: unknown repository type') % path)
+            raise error.Abort('%s\n' % inst)
+    raise error.Abort(_('%s: unknown repository type') % path)
 
 class progresssource(object):
     def __init__(self, ui, source, filecount):
@@ -120,6 +120,9 @@
                          item=file, total=self.filecount)
         return self.source.getfile(file, rev)
 
+    def targetfilebelongstosource(self, targetfilename):
+        return self.source.targetfilebelongstosource(targetfilename)
+
     def lookuprev(self, rev):
         return self.source.lookuprev(rev)
 
@@ -182,7 +185,7 @@
                 line = list(lex)
                 # check number of parents
                 if not (2 <= len(line) <= 3):
-                    raise util.Abort(_('syntax error in %s(%d): child parent1'
+                    raise error.Abort(_('syntax error in %s(%d): child parent1'
                                        '[,parent2] expected') % (path, i + 1))
                 for part in line:
                     self.source.checkrevformat(part)
@@ -193,7 +196,7 @@
                     m[child] = p1 + p2
          # if file does not exist or error reading, exit
         except IOError:
-            raise util.Abort(_('splicemap file not found or error reading %s:')
+            raise error.Abort(_('splicemap file not found or error reading %s:')
                                % path)
         return m
 
@@ -244,7 +247,7 @@
                     continue
                 # Parent is not in dest and not being converted, not good
                 if p not in parents:
-                    raise util.Abort(_('unknown splice map parent: %s') % p)
+                    raise error.Abort(_('unknown splice map parent: %s') % p)
                 pc.append(p)
             parents[c] = pc
 
@@ -340,7 +343,7 @@
         elif sortmode == 'closesort':
             picknext = makeclosesorter()
         else:
-            raise util.Abort(_('unknown sort mode: %s') % sortmode)
+            raise error.Abort(_('unknown sort mode: %s') % sortmode)
 
         children, actives = mapchildren(parents)
 
@@ -358,7 +361,7 @@
                 try:
                     pendings[c].remove(n)
                 except ValueError:
-                    raise util.Abort(_('cycle detected between %s and %s')
+                    raise error.Abort(_('cycle detected between %s and %s')
                                        % (recode(c), recode(n)))
                 if not pendings[c]:
                     # Parents are converted, node is eligible
@@ -366,7 +369,7 @@
                     pendings[c] = None
 
         if len(s) != len(parents):
-            raise util.Abort(_("not all revisions were sorted"))
+            raise error.Abort(_("not all revisions were sorted"))
 
         return s
 
@@ -437,7 +440,7 @@
         try:
             parents = self.splicemap[rev]
             self.ui.status(_('spliced in %s as parents of %s\n') %
-                           (parents, rev))
+                           (_(' and ').join(parents), rev))
             parents = [self.map.get(p, p) for p in parents]
         except KeyError:
             parents = [b[0] for b in pbranches]
@@ -553,16 +556,17 @@
     sortmodes = ('branchsort', 'datesort', 'sourcesort', 'closesort')
     sortmode = [m for m in sortmodes if opts.get(m)]
     if len(sortmode) > 1:
-        raise util.Abort(_('more than one sort mode specified'))
+        raise error.Abort(_('more than one sort mode specified'))
     if sortmode:
         sortmode = sortmode[0]
     else:
         sortmode = defaultsort
 
     if sortmode == 'sourcesort' and not srcc.hasnativeorder():
-        raise util.Abort(_('--sourcesort is not supported by this data source'))
+        raise error.Abort(_('--sourcesort is not supported by this data source')
+                         )
     if sortmode == 'closesort' and not srcc.hasnativeclose():
-        raise util.Abort(_('--closesort is not supported by this data source'))
+        raise error.Abort(_('--closesort is not supported by this data source'))
 
     fmap = opts.get('filemap')
     if fmap:
--- a/hgext/convert/cvs.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/convert/cvs.py	Tue Oct 20 15:59:10 2015 -0500
@@ -7,7 +7,7 @@
 
 import os, re, socket, errno
 from cStringIO import StringIO
-from mercurial import encoding, util
+from mercurial import encoding, util, error
 from mercurial.i18n import _
 
 from common import NoRepo, commit, converter_source, checktool
@@ -43,14 +43,14 @@
         maxrev = 0
         if self.revs:
             if len(self.revs) > 1:
-                raise util.Abort(_('cvs source does not support specifying '
+                raise error.Abort(_('cvs source does not support specifying '
                                    'multiple revs'))
             # TODO: handle tags
             try:
                 # patchset number?
                 maxrev = int(self.revs[0])
             except ValueError:
-                raise util.Abort(_('revision %s is not a patchset number')
+                raise error.Abort(_('revision %s is not a patchset number')
                                  % self.revs[0])
 
         d = os.getcwd()
@@ -150,7 +150,7 @@
                 sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
                                     "END AUTH REQUEST", ""]))
                 if sck.recv(128) != "I LOVE YOU\n":
-                    raise util.Abort(_("CVS pserver authentication failed"))
+                    raise error.Abort(_("CVS pserver authentication failed"))
 
                 self.writep = self.readp = sck.makefile('r+')
 
@@ -193,7 +193,7 @@
         self.writep.flush()
         r = self.readp.readline()
         if not r.startswith("Valid-requests"):
-            raise util.Abort(_('unexpected response from CVS server '
+            raise error.Abort(_('unexpected response from CVS server '
                                '(expected "Valid-requests", but got %r)')
                              % r)
         if "UseUnchanged" in r:
@@ -215,7 +215,7 @@
             while count > 0:
                 data = fp.read(min(count, chunksize))
                 if not data:
-                    raise util.Abort(_("%d bytes missing from remote file")
+                    raise error.Abort(_("%d bytes missing from remote file")
                                      % count)
                 count -= len(data)
                 output.write(data)
@@ -252,18 +252,18 @@
             else:
                 if line == "ok\n":
                     if mode is None:
-                        raise util.Abort(_('malformed response from CVS'))
+                        raise error.Abort(_('malformed response from CVS'))
                     return (data, "x" in mode and "x" or "")
                 elif line.startswith("E "):
                     self.ui.warn(_("cvs server: %s\n") % line[2:])
                 elif line.startswith("Remove"):
                     self.readp.readline()
                 else:
-                    raise util.Abort(_("unknown CVS response: %s") % line)
+                    raise error.Abort(_("unknown CVS response: %s") % line)
 
     def getchanges(self, rev, full):
         if full:
-            raise util.Abort(_("convert from cvs do not support --full"))
+            raise error.Abort(_("convert from cvs does not support --full"))
         self._parse()
         return sorted(self.files[rev].iteritems()), {}, set()
 
--- a/hgext/convert/cvsps.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/convert/cvsps.py	Tue Oct 20 15:59:10 2015 -0500
@@ -207,6 +207,7 @@
     # state machine begins here
     tags = {}     # dictionary of revisions on current file with their tags
     branchmap = {} # mapping between branch names and revision numbers
+    rcsmap = {}
     state = 0
     store = False # set when a new record can be appended
 
@@ -439,6 +440,8 @@
 
             log.append(e)
 
+            rcsmap[e.rcs.replace('/Attic/', '/')] = e.rcs
+
             if len(log) % 100 == 0:
                 ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')
 
@@ -446,6 +449,13 @@
 
     # find parent revisions of individual files
     versions = {}
+    for e in sorted(oldlog, key=lambda x: (x.rcs, x.revision)):
+        rcs = e.rcs.replace('/Attic/', '/')
+        if rcs in rcsmap:
+            e.rcs = rcsmap[rcs]
+        branch = e.revision[:-1]
+        versions[(e.rcs, branch)] = e.revision
+
     for e in log:
         branch = e.revision[:-1]
         p = versions.get((e.rcs, branch), None)
--- a/hgext/convert/darcs.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/convert/darcs.py	Tue Oct 20 15:59:10 2015 -0500
@@ -7,7 +7,7 @@
 
 from common import NoRepo, checktool, commandline, commit, converter_source
 from mercurial.i18n import _
-from mercurial import util
+from mercurial import util, error
 import os, shutil, tempfile, re, errno
 
 # The naming drift of ElementTree is fun!
@@ -39,11 +39,11 @@
         checktool('darcs')
         version = self.run0('--version').splitlines()[0].strip()
         if version < '2.1':
-            raise util.Abort(_('darcs version 2.1 or newer needed (found %r)') %
-                             version)
+            raise error.Abort(_('darcs version 2.1 or newer needed (found %r)')
+                              % version)
 
         if "ElementTree" not in globals():
-            raise util.Abort(_("Python ElementTree module is not available"))
+            raise error.Abort(_("Python ElementTree module is not available"))
 
         self.path = os.path.realpath(path)
 
@@ -158,7 +158,7 @@
 
     def getchanges(self, rev, full):
         if full:
-            raise util.Abort(_("convert from darcs do not support --full"))
+            raise error.Abort(_("convert from darcs does not support --full"))
         copies = {}
         changes = []
         man = None
@@ -192,7 +192,7 @@
 
     def getfile(self, name, rev):
         if rev != self.lastrev:
-            raise util.Abort(_('internal calling inconsistency'))
+            raise error.Abort(_('internal calling inconsistency'))
         path = os.path.join(self.tmppath, name)
         try:
             data = util.readfile(path)
--- a/hgext/convert/filemap.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/convert/filemap.py	Tue Oct 20 15:59:10 2015 -0500
@@ -7,7 +7,7 @@
 import posixpath
 import shlex
 from mercurial.i18n import _
-from mercurial import util, error
+from mercurial import error
 from common import SKIPREV, converter_source
 
 def rpairs(path):
@@ -42,9 +42,10 @@
         self.include = {}
         self.exclude = {}
         self.rename = {}
+        self.targetprefixes = None
         if path:
             if self.parse(path):
-                raise util.Abort(_('errors in filemap'))
+                raise error.Abort(_('errors in filemap'))
 
     def parse(self, path):
         errs = 0
@@ -100,6 +101,30 @@
                 pass
         return '', name, ''
 
+    def istargetfile(self, filename):
+        """Return true if the given target filename is covered as a destination
+        of the filemap. This is useful for identifying what parts of the target
+        repo belong to the source repo and what parts don't."""
+        if self.targetprefixes is None:
+            self.targetprefixes = set()
+            for before, after in self.rename.iteritems():
+                self.targetprefixes.add(after)
+
+        # If "." is a target, then all target files are considered from the
+        # source.
+        if not self.targetprefixes or '.' in self.targetprefixes:
+            return True
+
+        filename = normalize(filename)
+        for pre, suf in rpairs(filename):
+            # This check is imperfect since it doesn't account for the
+            # include/exclude list, but it should work in filemaps that don't
+            # apply include/exclude to the same source directories they are
+            # renaming.
+            if pre in self.targetprefixes:
+                return True
+        return False
+
     def __call__(self, name):
         if self.include:
             inc = self.lookup(name, self.include)[0]
@@ -266,7 +291,7 @@
         try:
             files = self.base.getchangedfiles(rev, i)
         except NotImplementedError:
-            raise util.Abort(_("source repository doesn't support --filemap"))
+            raise error.Abort(_("source repository doesn't support --filemap"))
         for f in files:
             if self.filemapper(f):
                 return True
@@ -410,6 +435,9 @@
 
         return files, ncopies, ncleanp2
 
+    def targetfilebelongstosource(self, targetfilename):
+        return self.filemapper.istargetfile(targetfilename)
+
     def getfile(self, name, rev):
         realname, realrev = rev
         return self.base.getfile(realname, realrev)
--- a/hgext/convert/git.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/convert/git.py	Tue Oct 20 15:59:10 2015 -0500
@@ -97,7 +97,7 @@
         # The default value (50) is based on the default for 'git diff'.
         similarity = ui.configint('convert', 'git.similarity', default=50)
         if similarity < 0 or similarity > 100:
-            raise util.Abort(_('similarity must be between 0 and 100'))
+            raise error.Abort(_('similarity must be between 0 and 100'))
         if similarity > 0:
             self.simopt = '-C%d%%' % similarity
             findcopiesharder = ui.configbool('convert', 'git.findcopiesharder',
@@ -123,14 +123,14 @@
             heads, ret = self.gitread('git rev-parse --branches --remotes')
             heads = heads.splitlines()
             if ret:
-                raise util.Abort(_('cannot retrieve git heads'))
+                raise error.Abort(_('cannot retrieve git heads'))
         else:
             heads = []
             for rev in self.revs:
                 rawhead, ret = self.gitread("git rev-parse --verify %s" % rev)
                 heads.append(rawhead[:-1])
                 if ret:
-                    raise util.Abort(_('cannot retrieve git head "%s"') % rev)
+                    raise error.Abort(_('cannot retrieve git head "%s"') % rev)
         return heads
 
     def catfile(self, rev, type):
@@ -140,11 +140,11 @@
         self.catfilepipe[0].flush()
         info = self.catfilepipe[1].readline().split()
         if info[1] != type:
-            raise util.Abort(_('cannot read %r object at %s') % (type, rev))
+            raise error.Abort(_('cannot read %r object at %s') % (type, rev))
         size = int(info[2])
         data = self.catfilepipe[1].read(size)
         if len(data) < size:
-            raise util.Abort(_('cannot read %r object at %s: unexpected size')
+            raise error.Abort(_('cannot read %r object at %s: unexpected size')
                              % (type, rev))
         # read the trailing newline
         self.catfilepipe[1].read(1)
@@ -210,7 +210,7 @@
 
     def getchanges(self, version, full):
         if full:
-            raise util.Abort(_("convert from git do not support --full"))
+            raise error.Abort(_("convert from git does not support --full"))
         self.modecache = {}
         fh = self.gitopen("git diff-tree -z --root -m -r %s %s" % (
             self.simopt, version))
@@ -224,6 +224,8 @@
         lcount = len(difftree)
         i = 0
 
+        skipsubmodules = self.ui.configbool('convert', 'git.skipsubmodules',
+                                            False)
         def add(entry, f, isdest):
             seen.add(f)
             h = entry[3]
@@ -232,6 +234,9 @@
             renamesource = (not isdest and entry[4][0] == 'R')
 
             if f == '.gitmodules':
+                if skipsubmodules:
+                    return
+
                 subexists[0] = True
                 if entry[4] == 'D' or renamesource:
                     subdeleted[0] = True
@@ -239,7 +244,8 @@
                 else:
                     changes.append(('.hgsub', ''))
             elif entry[1] == '160000' or entry[0] == ':160000':
-                subexists[0] = True
+                if not skipsubmodules:
+                    subexists[0] = True
             else:
                 if renamesource:
                     h = hex(nullid)
@@ -277,7 +283,7 @@
                         copies[fdest] = f
             entry = None
         if fh.close():
-            raise util.Abort(_('cannot read changes in %s') % version)
+            raise error.Abort(_('cannot read changes in %s') % version)
 
         if subexists[0]:
             if subdeleted[0]:
@@ -336,13 +342,13 @@
         for line in fh:
             line = line.strip()
             if line.startswith("error:") or line.startswith("fatal:"):
-                raise util.Abort(_('cannot read tags from %s') % self.path)
+                raise error.Abort(_('cannot read tags from %s') % self.path)
             node, tag = line.split(None, 1)
             if not tag.startswith(prefix):
                 continue
             alltags[tag[len(prefix):]] = node
         if fh.close():
-            raise util.Abort(_('cannot read tags from %s') % self.path)
+            raise error.Abort(_('cannot read tags from %s') % self.path)
 
         # Filter out tag objects for annotated tag refs
         for tag in alltags:
@@ -370,35 +376,38 @@
                               '"%s^%s" --' % (version, version, i + 1))
             changes = [f.rstrip('\n') for f in fh]
         if fh.close():
-            raise util.Abort(_('cannot read changes in %s') % version)
+            raise error.Abort(_('cannot read changes in %s') % version)
 
         return changes
 
     def getbookmarks(self):
         bookmarks = {}
 
-        # Interesting references in git are prefixed
-        prefix = 'refs/heads/'
-        prefixlen = len(prefix)
+        # Handle local and remote branches
+        remoteprefix = self.ui.config('convert', 'git.remoteprefix', 'remote')
+        reftypes = [
+            # (git prefix, hg prefix)
+            ('refs/remotes/origin/', remoteprefix + '/'),
+            ('refs/heads/', '')
+        ]
 
-        # factor two commands
-        remoteprefix = self.ui.config('convert', 'git.remoteprefix', 'remote')
-        gitcmd = { remoteprefix + '/': 'git ls-remote --heads origin',
-                                   '': 'git show-ref'}
+        exclude = set([
+            'refs/remotes/origin/HEAD',
+        ])
 
-        # Origin heads
-        for reftype in gitcmd:
-            try:
-                fh = self.gitopen(gitcmd[reftype], err=subprocess.PIPE)
-                for line in fh:
-                    line = line.strip()
-                    rev, name = line.split(None, 1)
-                    if not name.startswith(prefix):
+        try:
+            fh = self.gitopen('git show-ref', err=subprocess.PIPE)
+            for line in fh:
+                line = line.strip()
+                rev, name = line.split(None, 1)
+                # Process each type of branch
+                for gitprefix, hgprefix in reftypes:
+                    if not name.startswith(gitprefix) or name in exclude:
                         continue
-                    name = '%s%s' % (reftype, name[prefixlen:])
+                    name = '%s%s' % (hgprefix, name[len(gitprefix):])
                     bookmarks[name] = rev
-            except Exception:
-                pass
+        except Exception:
+            pass
 
         return bookmarks
 
--- a/hgext/convert/gnuarch.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/convert/gnuarch.py	Tue Oct 20 15:59:10 2015 -0500
@@ -8,7 +8,7 @@
 
 from common import NoRepo, commandline, commit, converter_source
 from mercurial.i18n import _
-from mercurial import encoding, util
+from mercurial import encoding, util, error
 import os, shutil, tempfile, stat
 from email.Parser import Parser
 
@@ -42,7 +42,7 @@
             if util.findexe('tla'):
                 self.execmd = 'tla'
             else:
-                raise util.Abort(_('cannot find a GNU Arch tool'))
+                raise error.Abort(_('cannot find a GNU Arch tool'))
 
         commandline.__init__(self, ui, self.execmd)
 
@@ -135,7 +135,7 @@
 
     def getfile(self, name, rev):
         if rev != self.lastrev:
-            raise util.Abort(_('internal calling inconsistency'))
+            raise error.Abort(_('internal calling inconsistency'))
 
         if not os.path.lexists(os.path.join(self.tmppath, name)):
             return None, None
@@ -144,7 +144,7 @@
 
     def getchanges(self, rev, full):
         if full:
-            raise util.Abort(_("convert from arch do not support --full"))
+            raise error.Abort(_("convert from arch does not support --full"))
         self._update(rev)
         changes = []
         copies = {}
@@ -287,7 +287,7 @@
                 self.changes[rev].continuationof = self.recode(
                     catlog['Continuation-of'])
         except Exception:
-            raise util.Abort(_('could not parse cat-log of %s') % rev)
+            raise error.Abort(_('could not parse cat-log of %s') % rev)
 
     def _parsechangeset(self, data, rev):
         for l in data:
--- a/hgext/convert/hg.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/convert/hg.py	Tue Oct 20 15:59:10 2015 -0500
@@ -23,6 +23,7 @@
 from mercurial.node import bin, hex, nullid
 from mercurial import hg, util, context, bookmarks, error, scmutil, exchange
 from mercurial import phases
+from mercurial import merge as mergemod
 
 from common import NoRepo, commit, converter_source, converter_sink, mapfile
 
@@ -176,14 +177,58 @@
 
         return fp.getvalue()
 
+    def _calculatemergedfiles(self, source, p1ctx, p2ctx):
+        """Calculates the files from p2 that we need to pull in when merging p1
+        and p2, given that the merge is coming from the given source.
+
+        This prevents us from losing files that only exist in the target p2 and
+        that don't come from the source repo (like if you're merging multiple
+        repositories together).
+        """
+        anc = [p1ctx.ancestor(p2ctx)]
+        # Calculate what files are coming from p2
+        actions, diverge, rename = mergemod.calculateupdates(
+            self.repo, p1ctx, p2ctx, anc,
+            True,  # branchmerge
+            True,  # force
+            False, # partial
+            False, # acceptremote
+            False, # followcopies
+        )
+
+        for file, (action, info, msg) in actions.iteritems():
+            if source.targetfilebelongstosource(file):
+                # If the file belongs to the source repo, ignore the p2
+                # since it will be covered by the existing fileset.
+                continue
+
+            # If the file requires actual merging, abort. We don't have enough
+            # context to resolve merges correctly.
+            if action in ['m', 'dm', 'cd', 'dc']:
+                raise error.Abort(_("unable to convert merge commit "
+                    "since target parents do not merge cleanly (file "
+                    "%s, parents %s and %s)") % (file, p1ctx,
+                                                 p2ctx))
+            elif action == 'k':
+                # 'keep' means nothing changed from p1
+                continue
+            else:
+                # Any other change means we want to take the p2 version
+                yield file
+
     def putcommit(self, files, copies, parents, commit, source, revmap, full,
                   cleanp2):
         files = dict(files)
 
         def getfilectx(repo, memctx, f):
-            if p2ctx and f in cleanp2 and f not in copies:
+            if p2ctx and f in p2files and f not in copies:
                 self.ui.debug('reusing %s from p2\n' % f)
-                return p2ctx[f]
+                try:
+                    return p2ctx[f]
+                except error.ManifestLookupError:
+                    # If the file doesn't exist in p2, then we're syncing a
+                    # delete, so just return None.
+                    return None
             try:
                 v = files[f]
             except KeyError:
@@ -255,6 +300,7 @@
         while parents:
             p1 = p2
             p2 = parents.pop(0)
+            p1ctx = self.repo[p1]
             p2ctx = None
             if p2 != nullid:
                 p2ctx = self.repo[p2]
@@ -262,6 +308,13 @@
             if full:
                 fileset.update(self.repo[p1])
                 fileset.update(self.repo[p2])
+
+            if p2ctx:
+                p2files = set(cleanp2)
+                for file in self._calculatemergedfiles(source, p1ctx, p2ctx):
+                    p2files.add(file)
+                    fileset.add(file)
+
             ctx = context.memctx(self.repo, (p1, p2), text, fileset,
                                  getfilectx, commit.author, commit.date, extra)
 
@@ -370,7 +423,7 @@
 
     def hascommitforsplicemap(self, rev):
         if rev not in self.repo and self.clonebranches:
-            raise util.Abort(_('revision %s not found in destination '
+            raise error.Abort(_('revision %s not found in destination '
                                'repository (lookups with clonebranches=true '
                                'are not implemented)') % rev)
         return rev in self.repo
@@ -378,9 +431,6 @@
 class mercurial_source(converter_source):
     def __init__(self, ui, path, revs=None):
         converter_source.__init__(self, ui, path, revs)
-        if revs and len(revs) > 1:
-            raise util.Abort(_("mercurial source does not support specifying "
-                               "multiple revisions"))
         self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors', False)
         self.ignored = set()
         self.saverev = ui.configbool('convert', 'hg.saverev', False)
@@ -405,7 +455,7 @@
                 try:
                     startnode = self.repo.lookup(startnode)
                 except error.RepoError:
-                    raise util.Abort(_('%s is not a valid start revision')
+                    raise error.Abort(_('%s is not a valid start revision')
                                      % startnode)
                 startrev = self.repo.changelog.rev(startnode)
                 children = {startnode: 1}
@@ -415,12 +465,12 @@
             else:
                 self.keep = util.always
             if revs:
-                self._heads = [self.repo[revs[0]].node()]
+                self._heads = [self.repo[r].node() for r in revs]
             else:
                 self._heads = self.repo.heads()
         else:
             if revs or startnode is not None:
-                raise util.Abort(_('hg.revs cannot be combined with '
+                raise error.Abort(_('hg.revs cannot be combined with '
                                    'hg.startrev or --rev'))
             nodes = set()
             parents = set()
--- a/hgext/convert/monotone.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/convert/monotone.py	Tue Oct 20 15:59:10 2015 -0500
@@ -7,7 +7,7 @@
 # GNU General Public License version 2 or any later version.
 
 import os, re
-from mercurial import util
+from mercurial import util, error
 from common import NoRepo, commit, converter_source, checktool
 from common import commandline
 from mercurial.i18n import _
@@ -16,7 +16,7 @@
     def __init__(self, ui, path=None, revs=None):
         converter_source.__init__(self, ui, path, revs)
         if revs and len(revs) > 1:
-            raise util.Abort(_('monotone source does not support specifying '
+            raise error.Abort(_('monotone source does not support specifying '
                                'multiple revs'))
         commandline.__init__(self, ui, 'mtn')
 
@@ -110,34 +110,34 @@
         while read != ':':
             read = self.mtnreadfp.read(1)
             if not read:
-                raise util.Abort(_('bad mtn packet - no end of commandnbr'))
+                raise error.Abort(_('bad mtn packet - no end of commandnbr'))
             commandnbr += read
         commandnbr = commandnbr[:-1]
 
         stream = self.mtnreadfp.read(1)
         if stream not in 'mewptl':
-            raise util.Abort(_('bad mtn packet - bad stream type %s') % stream)
+            raise error.Abort(_('bad mtn packet - bad stream type %s') % stream)
 
         read = self.mtnreadfp.read(1)
         if read != ':':
-            raise util.Abort(_('bad mtn packet - no divider before size'))
+            raise error.Abort(_('bad mtn packet - no divider before size'))
 
         read = None
         lengthstr = ''
         while read != ':':
             read = self.mtnreadfp.read(1)
             if not read:
-                raise util.Abort(_('bad mtn packet - no end of packet size'))
+                raise error.Abort(_('bad mtn packet - no end of packet size'))
             lengthstr += read
         try:
             length = long(lengthstr[:-1])
         except TypeError:
-            raise util.Abort(_('bad mtn packet - bad packet size %s')
+            raise error.Abort(_('bad mtn packet - bad packet size %s')
                 % lengthstr)
 
         read = self.mtnreadfp.read(length)
         if len(read) != length:
-            raise util.Abort(_("bad mtn packet - unable to read full packet "
+            raise error.Abort(_("bad mtn packet - unable to read full packet "
                 "read %s of %s") % (len(read), length))
 
         return (commandnbr, stream, length, read)
@@ -152,7 +152,7 @@
             if stream == 'l':
                 # End of command
                 if output != '0':
-                    raise util.Abort(_("mtn command '%s' returned %s") %
+                    raise error.Abort(_("mtn command '%s' returned %s") %
                         (command, output))
                 break
             elif stream in 'ew':
@@ -229,7 +229,8 @@
 
     def getchanges(self, rev, full):
         if full:
-            raise util.Abort(_("convert from monotone do not support --full"))
+            raise error.Abort(_("convert from monotone does not support "
+                              "--full"))
         revision = self.mtnrun("get_revision", rev).split("\n\n")
         files = {}
         ignoremove = {}
@@ -330,7 +331,7 @@
             versionstr = self.mtnrunsingle("interface_version")
             version = float(versionstr)
         except Exception:
-            raise util.Abort(_("unable to determine mtn automate interface "
+            raise error.Abort(_("unable to determine mtn automate interface "
                 "version"))
 
         if version >= 12.0:
@@ -344,12 +345,12 @@
             # read the headers
             read = self.mtnreadfp.readline()
             if read != 'format-version: 2\n':
-                raise util.Abort(_('mtn automate stdio header unexpected: %s')
+                raise error.Abort(_('mtn automate stdio header unexpected: %s')
                     % read)
             while read != '\n':
                 read = self.mtnreadfp.readline()
                 if not read:
-                    raise util.Abort(_("failed to reach end of mtn automate "
+                    raise error.Abort(_("failed to reach end of mtn automate "
                         "stdio headers"))
         else:
             self.ui.debug("mtn automate version %s - not using automate stdio "
--- a/hgext/convert/p4.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/convert/p4.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,7 +5,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from mercurial import util
+from mercurial import util, error
 from mercurial.i18n import _
 
 from common import commit, converter_source, checktool, NoRepo
@@ -70,7 +70,7 @@
         self.re_keywords_old = re.compile("\$(Id|Header):[^$\n]*\$")
 
         if revs and len(revs) > 1:
-            raise util.Abort(_("p4 source does not support specifying "
+            raise error.Abort(_("p4 source does not support specifying "
                                "multiple revisions"))
         self._parse(ui, path)
 
@@ -277,7 +277,7 @@
 
     def getchanges(self, rev, full):
         if full:
-            raise util.Abort(_("convert from p4 do not support --full"))
+            raise error.Abort(_("convert from p4 does not support --full"))
         return self.files[rev], self.copies[rev], set()
 
     def getcommit(self, rev):
--- a/hgext/convert/subversion.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/convert/subversion.py	Tue Oct 20 15:59:10 2015 -0500
@@ -6,7 +6,7 @@
 import xml.dom.minidom
 import cPickle as pickle
 
-from mercurial import strutil, scmutil, util, encoding
+from mercurial import strutil, scmutil, util, encoding, error
 from mercurial.i18n import _
 
 propertycache = util.propertycache
@@ -141,7 +141,7 @@
     avoid memory collection issues.
     """
     if svn is None:
-        raise util.Abort(_('debugsvnlog could not load Subversion python '
+        raise error.Abort(_('debugsvnlog could not load Subversion python '
                            'bindings'))
 
     util.setbinary(sys.stdin)
@@ -159,14 +159,14 @@
             try:
                 entry = pickle.load(self._stdout)
             except EOFError:
-                raise util.Abort(_('Mercurial failed to run itself, check'
+                raise error.Abort(_('Mercurial failed to run itself, check'
                                    ' hg executable is in PATH'))
             try:
                 orig_paths, revnum, author, date, message = entry
             except (TypeError, ValueError):
                 if entry is None:
                     break
-                raise util.Abort(_("log stream exception '%s'") % entry)
+                raise error.Abort(_("log stream exception '%s'") % entry)
             yield entry
 
     def close(self):
@@ -327,12 +327,12 @@
 
         if revs:
             if len(revs) > 1:
-                raise util.Abort(_('subversion source does not support '
+                raise error.Abort(_('subversion source does not support '
                                    'specifying multiple revisions'))
             try:
                 latest = int(revs[0])
             except ValueError:
-                raise util.Abort(_('svn: revision %s is not an integer') %
+                raise error.Abort(_('svn: revision %s is not an integer') %
                                  revs[0])
 
         self.trunkname = self.ui.config('convert', 'svn.trunk',
@@ -343,7 +343,7 @@
             if self.startrev < 0:
                 self.startrev = 0
         except ValueError:
-            raise util.Abort(_('svn: start revision %s is not an integer')
+            raise error.Abort(_('svn: start revision %s is not an integer')
                              % self.startrev)
 
         try:
@@ -351,7 +351,7 @@
         except SvnPathNotFound:
             self.head = None
         if not self.head:
-            raise util.Abort(_('no revision found in module %s')
+            raise error.Abort(_('no revision found in module %s')
                              % self.module)
         self.last_changed = self.revnum(self.head)
 
@@ -396,8 +396,8 @@
                     # we are converting from inside this directory
                     return None
                 if cfgpath:
-                    raise util.Abort(_('expected %s to be at %r, but not found')
-                                 % (name, path))
+                    raise error.Abort(_('expected %s to be at %r, but not found'
+                                       ) % (name, path))
                 return None
             self.ui.note(_('found %s at %r\n') % (name, path))
             return path
@@ -415,7 +415,7 @@
             self.module += '/' + trunk
             self.head = self.latest(self.module, self.last_changed)
             if not self.head:
-                raise util.Abort(_('no revision found in module %s')
+                raise error.Abort(_('no revision found in module %s')
                                  % self.module)
 
         # First head in the list is the module's head
@@ -442,11 +442,11 @@
 
         if self.startrev and self.heads:
             if len(self.heads) > 1:
-                raise util.Abort(_('svn: start revision is not supported '
+                raise error.Abort(_('svn: start revision is not supported '
                                    'with more than one branch'))
             revnum = self.revnum(self.heads[0])
             if revnum < self.startrev:
-                raise util.Abort(
+                raise error.Abort(
                     _('svn: no revision found after start revision %d')
                                  % self.startrev)
 
@@ -502,7 +502,7 @@
                 stop = revnum + 1
             self._fetch_revisions(revnum, stop)
             if rev not in self.commits:
-                raise util.Abort(_('svn: revision %s not found') % revnum)
+                raise error.Abort(_('svn: revision %s not found') % revnum)
         revcommit = self.commits[rev]
         # caller caches the result, so free it here to release memory
         del self.commits[rev]
@@ -513,7 +513,7 @@
         if not re.match(r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
                               '[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
                               '{12,12}(.*)\@[0-9]+$',revstr):
-            raise util.Abort(_('%s entry %s is not a valid revision'
+            raise error.Abort(_('%s entry %s is not a valid revision'
                                ' identifier') % (mapname, revstr))
 
     def numcommits(self):
@@ -951,7 +951,7 @@
         except SubversionException as xxx_todo_changeme:
             (inst, num) = xxx_todo_changeme.args
             if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
-                raise util.Abort(_('svn: branch has no revision %s')
+                raise error.Abort(_('svn: branch has no revision %s')
                                  % to_revnum)
             raise
 
@@ -1052,7 +1052,7 @@
         try:
             stdin.close()
         except IOError:
-            raise util.Abort(_('Mercurial failed to run itself, check'
+            raise error.Abort(_('Mercurial failed to run itself, check'
                                ' hg executable is in PATH'))
         return logstream(stdout)
 
@@ -1302,7 +1302,7 @@
                     return parents[0]
                 self.ui.warn(_('unexpected svn output:\n'))
                 self.ui.warn(output)
-                raise util.Abort(_('unable to cope with svn output'))
+                raise error.Abort(_('unable to cope with svn output'))
             if commit.rev:
                 self.run('propset', 'hg:convert-rev', commit.rev,
                          revprop=True, revision=rev)
@@ -1329,6 +1329,6 @@
         # repository and childmap would not list all revisions. Too bad.
         if rev in self.childmap:
             return True
-        raise util.Abort(_('splice map revision %s not found in subversion '
+        raise error.Abort(_('splice map revision %s not found in subversion '
                            'child map (revision lookups are not implemented)')
                          % rev)
--- a/hgext/eol.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/eol.py	Tue Oct 20 15:59:10 2015 -0500
@@ -23,7 +23,7 @@
 ``native`` is an alias for checking out in the platform's default line
 ending: ``LF`` on Unix (including Mac OS X) and ``CRLF`` on
 Windows. Note that ``BIN`` (do nothing to line endings) is Mercurial's
-default behaviour; it is only needed if you need to override a later,
+default behavior; it is only needed if you need to override a later,
 more general pattern.
 
 The optional ``[repository]`` section specifies the line endings to
@@ -247,7 +247,7 @@
         for node, target, f in failed:
             msgs.append(_("  %s in %s should not have %s line endings") %
                         (f, node, eols[target]))
-        raise util.Abort(_("end-of-line check failed:\n") + "\n".join(msgs))
+        raise error.Abort(_("end-of-line check failed:\n") + "\n".join(msgs))
 
 def checkallhook(ui, repo, node, hooktype, **kwargs):
     """verify that files have expected EOLs"""
@@ -333,7 +333,7 @@
                     # so ignore the error.
                     pass
 
-        def commitctx(self, ctx, error=False):
+        def commitctx(self, ctx, haserror=False):
             for f in sorted(ctx.added() + ctx.modified()):
                 if not self._eolfile(f):
                     continue
@@ -347,8 +347,8 @@
                     # have all non-binary files taken care of.
                     continue
                 if inconsistenteol(data):
-                    raise util.Abort(_("inconsistent newline style "
+                    raise error.Abort(_("inconsistent newline style "
                                        "in %s\n") % f)
-            return super(eolrepo, self).commitctx(ctx, error)
+            return super(eolrepo, self).commitctx(ctx, haserror)
     repo.__class__ = eolrepo
     repo._hgcleardirstate()
--- a/hgext/extdiff.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/extdiff.py	Tue Oct 20 15:59:10 2015 -0500
@@ -63,7 +63,7 @@
 from mercurial.i18n import _
 from mercurial.node import short, nullid
 from mercurial import cmdutil, scmutil, util, commands, encoding, filemerge
-from mercurial import archival
+from mercurial import archival, error
 import os, shlex, shutil, tempfile, re
 
 cmdtable = {}
@@ -127,7 +127,7 @@
 
     if revs and change:
         msg = _('cannot specify --rev and --change at the same time')
-        raise util.Abort(msg)
+        raise error.Abort(msg)
     elif change:
         node2 = scmutil.revsingle(repo, change, None).node()
         node1a, node1b = repo.changelog.parents(node2)
@@ -146,72 +146,94 @@
     subrepos=opts.get('subrepos')
 
     matcher = scmutil.match(repo[node2], pats, opts)
-    mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher,
-                                               listsubrepos=subrepos)[:3])
-    if do3way:
-        mod_b, add_b, rem_b = map(set, repo.status(node1b, node2, matcher,
-                                                   listsubrepos=subrepos)[:3])
+
+    if opts.get('patch'):
+        if subrepos:
+            raise error.Abort(_('--patch cannot be used with --subrepos'))
+        if node2 is None:
+            raise error.Abort(_('--patch requires two revisions'))
     else:
-        mod_b, add_b, rem_b = set(), set(), set()
-    modadd = mod_a | add_a | mod_b | add_b
-    common = modadd | rem_a | rem_b
-    if not common:
-        return 0
+        mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher,
+                                                   listsubrepos=subrepos)[:3])
+        if do3way:
+            mod_b, add_b, rem_b = map(set,
+                                      repo.status(node1b, node2, matcher,
+                                                  listsubrepos=subrepos)[:3])
+        else:
+            mod_b, add_b, rem_b = set(), set(), set()
+        modadd = mod_a | add_a | mod_b | add_b
+        common = modadd | rem_a | rem_b
+        if not common:
+            return 0
 
     tmproot = tempfile.mkdtemp(prefix='extdiff.')
     try:
-        # Always make a copy of node1a (and node1b, if applicable)
-        dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
-        dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot, subrepos)[0]
-        rev1a = '@%d' % repo[node1a].rev()
-        if do3way:
-            dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
-            dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot,
+        if not opts.get('patch'):
+            # Always make a copy of node1a (and node1b, if applicable)
+            dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
+            dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot,
                              subrepos)[0]
-            rev1b = '@%d' % repo[node1b].rev()
-        else:
-            dir1b = None
-            rev1b = ''
+            rev1a = '@%d' % repo[node1a].rev()
+            if do3way:
+                dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
+                dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot,
+                                 subrepos)[0]
+                rev1b = '@%d' % repo[node1b].rev()
+            else:
+                dir1b = None
+                rev1b = ''
 
-        fns_and_mtime = []
+            fns_and_mtime = []
 
-        # If node2 in not the wc or there is >1 change, copy it
-        dir2root = ''
-        rev2 = ''
-        if node2:
-            dir2 = snapshot(ui, repo, modadd, node2, tmproot, subrepos)[0]
-            rev2 = '@%d' % repo[node2].rev()
-        elif len(common) > 1:
-            #we only actually need to get the files to copy back to
-            #the working dir in this case (because the other cases
-            #are: diffing 2 revisions or single file -- in which case
-            #the file is already directly passed to the diff tool).
-            dir2, fns_and_mtime = snapshot(ui, repo, modadd, None, tmproot,
-                                           subrepos)
-        else:
-            # This lets the diff tool open the changed file directly
-            dir2 = ''
-            dir2root = repo.root
+            # If node2 is not the wc or there is >1 change, copy it
+            dir2root = ''
+            rev2 = ''
+            if node2:
+                dir2 = snapshot(ui, repo, modadd, node2, tmproot, subrepos)[0]
+                rev2 = '@%d' % repo[node2].rev()
+            elif len(common) > 1:
+                #we only actually need to get the files to copy back to
+                #the working dir in this case (because the other cases
+                #are: diffing 2 revisions or single file -- in which case
+                #the file is already directly passed to the diff tool).
+                dir2, fns_and_mtime = snapshot(ui, repo, modadd, None, tmproot,
+                                               subrepos)
+            else:
+                # This lets the diff tool open the changed file directly
+                dir2 = ''
+                dir2root = repo.root
+
+            label1a = rev1a
+            label1b = rev1b
+            label2 = rev2
 
-        label1a = rev1a
-        label1b = rev1b
-        label2 = rev2
-
-        # If only one change, diff the files instead of the directories
-        # Handle bogus modifies correctly by checking if the files exist
-        if len(common) == 1:
-            common_file = util.localpath(common.pop())
-            dir1a = os.path.join(tmproot, dir1a, common_file)
-            label1a = common_file + rev1a
-            if not os.path.isfile(dir1a):
-                dir1a = os.devnull
-            if do3way:
-                dir1b = os.path.join(tmproot, dir1b, common_file)
-                label1b = common_file + rev1b
-                if not os.path.isfile(dir1b):
-                    dir1b = os.devnull
-            dir2 = os.path.join(dir2root, dir2, common_file)
-            label2 = common_file + rev2
+            # If only one change, diff the files instead of the directories
+            # Handle bogus modifies correctly by checking if the files exist
+            if len(common) == 1:
+                common_file = util.localpath(common.pop())
+                dir1a = os.path.join(tmproot, dir1a, common_file)
+                label1a = common_file + rev1a
+                if not os.path.isfile(dir1a):
+                    dir1a = os.devnull
+                if do3way:
+                    dir1b = os.path.join(tmproot, dir1b, common_file)
+                    label1b = common_file + rev1b
+                    if not os.path.isfile(dir1b):
+                        dir1b = os.devnull
+                dir2 = os.path.join(dir2root, dir2, common_file)
+                label2 = common_file + rev2
+        else:
+            template = 'hg-%h.patch'
+            cmdutil.export(repo, [repo[node1a].rev(), repo[node2].rev()],
+                           template=repo.vfs.reljoin(tmproot, template),
+                           match=matcher)
+            label1a = cmdutil.makefilename(repo, template, node1a)
+            label2 = cmdutil.makefilename(repo, template, node2)
+            dir1a = repo.vfs.reljoin(tmproot, label1a)
+            dir2 = repo.vfs.reljoin(tmproot, label2)
+            dir1b = None
+            label1b = None
+            fns_and_mtime = []
 
         # Function to quote file/dir names in the argument string.
         # When not operating in 3-way mode, an empty string is
@@ -255,6 +277,7 @@
      _('pass option to comparison program'), _('OPT')),
     ('r', 'rev', [], _('revision'), _('REV')),
     ('c', 'change', '', _('change made by revision'), _('REV')),
+    ('', 'patch', None, _('compare patches for two revisions'))
     ] + commands.walkopts + commands.subrepoopts,
     _('hg extdiff [OPT]... [FILE]...'),
     inferrepo=True)
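For orientation, the new ``--patch`` branch above boils down to: export each of the
two revisions as a patch file into the temporary directory and hand those two files
to the external diff tool, instead of snapshotting working-copy trees. A rough
standalone sketch of that flow, using the ``hg export`` command line rather than the
in-process API (repository path, revisions and diff tool are placeholders)::

  import os
  import shutil
  import subprocess
  import tempfile

  def diff_patches(repo, rev1, rev2, tool='diff'):
      """Export two revisions as patches and compare the two patch files."""
      tmproot = tempfile.mkdtemp(prefix='extdiff.')
      try:
          patches = []
          for rev in (rev1, rev2):
              out = os.path.join(tmproot, 'hg-%s.patch' % rev)
              subprocess.check_call(['hg', '-R', repo, 'export',
                                     '-r', str(rev), '-o', out])
              patches.append(out)
          return subprocess.call([tool] + patches)
      finally:
          shutil.rmtree(tmproot)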
--- a/hgext/factotum.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/factotum.py	Tue Oct 20 15:59:10 2015 -0500
@@ -47,7 +47,7 @@
 
 from mercurial.i18n import _
 from mercurial.url import passwordmgr
-from mercurial import httpconnection, util
+from mercurial import httpconnection, error
 import os, urllib2
 
 ERRMAX = 128
@@ -56,7 +56,7 @@
 
 def auth_getkey(self, params):
     if not self.ui.interactive():
-        raise util.Abort(_('factotum not interactive'))
+        raise error.Abort(_('factotum not interactive'))
     if 'user=' not in params:
         params = '%s user?' % params
     params = '%s !password?' % params
@@ -77,10 +77,10 @@
                         if passwd.endswith("'"):
                             passwd = passwd[1:-1].replace("''", "'")
                         else:
-                            raise util.Abort(_('malformed password string'))
+                            raise error.Abort(_('malformed password string'))
                     return (user, passwd)
         except (OSError, IOError):
-            raise util.Abort(_('factotum not responding'))
+            raise error.Abort(_('factotum not responding'))
         finally:
             os.close(fd)
         getkey(self, params)
--- a/hgext/fetch.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/fetch.py	Tue Oct 20 15:59:10 2015 -0500
@@ -60,7 +60,7 @@
     except error.RepoLookupError:
         branchnode = None
     if parent != branchnode:
-        raise util.Abort(_('working directory not at branch tip'),
+        raise error.Abort(_('working directory not at branch tip'),
                          hint=_('use "hg update" to check out branch tip'))
 
     wlock = lock = None
@@ -73,7 +73,7 @@
         bheads = repo.branchheads(branch)
         bheads = [head for head in bheads if len(repo[head].children()) == 0]
         if len(bheads) > 1:
-            raise util.Abort(_('multiple heads in this branch '
+            raise error.Abort(_('multiple heads in this branch '
                                '(use "hg heads ." and "hg merge" to merge)'))
 
         other = hg.peer(repo, opts, ui.expandpath(source))
@@ -86,7 +86,7 @@
             except error.CapabilityError:
                 err = _("other repository doesn't support revision lookup, "
                         "so a rev cannot be specified.")
-                raise util.Abort(err)
+                raise error.Abort(err)
 
         # Are there any changes at all?
         modheads = exchange.pull(repo, other, heads=revs).cgresult
--- a/hgext/gpg.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/gpg.py	Tue Oct 20 15:59:10 2015 -0500
@@ -6,7 +6,7 @@
 '''commands to sign and verify changesets'''
 
 import os, tempfile, binascii
-from mercurial import util, commands, match, cmdutil
+from mercurial import util, commands, match, cmdutil, error
 from mercurial import node as hgnode
 from mercurial.i18n import _
 
@@ -237,7 +237,7 @@
         nodes = [node for node in repo.dirstate.parents()
                  if node != hgnode.nullid]
         if len(nodes) > 1:
-            raise util.Abort(_('uncommitted merge - please provide a '
+            raise error.Abort(_('uncommitted merge - please provide a '
                                'specific revision'))
         if not nodes:
             nodes = [repo.changelog.tip()]
@@ -250,7 +250,7 @@
         data = node2txt(repo, n, sigver)
         sig = mygpg.sign(data)
         if not sig:
-            raise util.Abort(_("error while signing"))
+            raise error.Abort(_("error while signing"))
         sig = binascii.b2a_base64(sig)
         sig = sig.replace("\n", "")
         sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
@@ -263,7 +263,7 @@
     if not opts["force"]:
         msigs = match.exact(repo.root, '', ['.hgsigs'])
         if any(repo.status(match=msigs, unknown=True, ignored=True)):
-            raise util.Abort(_("working copy of .hgsigs is changed "),
+            raise error.Abort(_("working copy of .hgsigs is changed "),
                              hint=_("please commit .hgsigs manually"))
 
     sigsfile = repo.wfile(".hgsigs", "ab")
@@ -287,7 +287,7 @@
         repo.commit(message, opts['user'], opts['date'], match=msigs,
                     editor=editor)
     except ValueError as inst:
-        raise util.Abort(str(inst))
+        raise error.Abort(str(inst))
 
 def shortkey(ui, key):
     if len(key) != 16:
@@ -301,4 +301,4 @@
     if ver == "0":
         return "%s\n" % hgnode.hex(node)
     else:
-        raise util.Abort(_("unknown signature version"))
+        raise error.Abort(_("unknown signature version"))
--- a/hgext/hgcia.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/hgcia.py	Tue Oct 20 15:59:10 2015 -0500
@@ -43,7 +43,7 @@
 
 from mercurial.i18n import _
 from mercurial.node import bin, short
-from mercurial import cmdutil, patch, util, mail
+from mercurial import cmdutil, patch, util, mail, error
 import email.Parser
 
 import socket, xmlrpclib
@@ -233,7 +233,7 @@
         srv = xmlrpclib.Server(self.ciaurl)
         res = srv.hub.deliver(msg)
         if res is not True and res != 'queued.':
-            raise util.Abort(_('%s returned an error: %s') %
+            raise error.Abort(_('%s returned an error: %s') %
                              (self.ciaurl, res))
 
     def sendemail(self, address, data):
@@ -259,7 +259,7 @@
             ui.write(msg)
         elif cia.ciaurl.startswith('mailto:'):
             if not cia.emailfrom:
-                raise util.Abort(_('email.from must be defined when '
+                raise error.Abort(_('email.from must be defined when '
                                    'sending by email'))
             cia.sendemail(cia.ciaurl[7:], msg)
         else:
--- a/hgext/highlight/__init__.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/highlight/__init__.py	Tue Oct 20 15:59:10 2015 -0500
@@ -13,23 +13,40 @@
 It depends on the Pygments syntax highlighting library:
 http://pygments.org/
 
-There is a single configuration option::
+There are the following configuration options::
 
   [web]
-  pygments_style = <style>
+  pygments_style = <style> (default: colorful)
+  highlightfiles = <fileset> (default: size('<5M'))
+  highlightonlymatchfilename = <bool> (default: False)
 
-The default is 'colorful'.
+``highlightonlymatchfilename`` will only highlight files if their type could
+be identified by their filename. When this is not enabled (the default),
+Pygments will try very hard to identify the file type from content and any
+match (even matches with a low confidence score) will be used.
 """
 
 import highlight
 from mercurial.hgweb import webcommands, webutil, common
-from mercurial import extensions, encoding
+from mercurial import extensions, encoding, fileset
 # Note for extension authors: ONLY specify testedwith = 'internal' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
 testedwith = 'internal'
 
+def pygmentize(web, field, fctx, tmpl):
+    style = web.config('web', 'pygments_style', 'colorful')
+    expr = web.config('web', 'highlightfiles', "size('<5M')")
+    filenameonly = web.configbool('web', 'highlightonlymatchfilename', False)
+
+    ctx = fctx.changectx()
+    tree = fileset.parse(expr)
+    mctx = fileset.matchctx(ctx, subset=[fctx.path()], status=None)
+    if fctx.path() in fileset.getset(mctx, tree):
+        highlight.pygmentize(field, fctx, style, tmpl,
+                guessfilenameonly=filenameonly)
+
 def filerevision_highlight(orig, web, req, tmpl, fctx):
     mt = ''.join(tmpl('mimetype', encoding=encoding.encoding))
     # only pygmentize for mimetype containing 'html' so we both match
@@ -40,16 +57,16 @@
     # can't clash with the file's content-type here in case we
     # pygmentize a html file
     if 'html' in mt:
-        style = web.config('web', 'pygments_style', 'colorful')
-        highlight.pygmentize('fileline', fctx, style, tmpl)
+        pygmentize(web, 'fileline', fctx, tmpl)
+
     return orig(web, req, tmpl, fctx)
 
 def annotate_highlight(orig, web, req, tmpl):
     mt = ''.join(tmpl('mimetype', encoding=encoding.encoding))
     if 'html' in mt:
         fctx = webutil.filectx(web.repo, req)
-        style = web.config('web', 'pygments_style', 'colorful')
-        highlight.pygmentize('annotateline', fctx, style, tmpl)
+        pygmentize(web, 'annotateline', fctx, tmpl)
+
     return orig(web, req, tmpl)
 
 def generate_css(web, req, tmpl):
--- a/hgext/highlight/highlight.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/highlight/highlight.py	Tue Oct 20 15:59:10 2015 -0500
@@ -20,7 +20,7 @@
 SYNTAX_CSS = ('\n<link rel="stylesheet" href="{url}highlightcss" '
               'type="text/css" />')
 
-def pygmentize(field, fctx, style, tmpl):
+def pygmentize(field, fctx, style, tmpl, guessfilenameonly=False):
 
     # append a <link ...> to the syntax highlighting css
     old_header = tmpl.load('header')
@@ -46,10 +46,21 @@
         lexer = guess_lexer_for_filename(fctx.path(), text[:1024],
                                          stripnl=False)
     except (ClassNotFound, ValueError):
+        # guess_lexer will return a lexer if *any* lexer matches. There is
+        # no way to specify a minimum match score. This can give a high rate of
+        # false positives on files with an unknown filename pattern.
+        if guessfilenameonly:
+            return
+
         try:
             lexer = guess_lexer(text[:1024], stripnl=False)
         except (ClassNotFound, ValueError):
-            lexer = TextLexer(stripnl=False)
+            # Don't highlight unknown files
+            return
+
+    # Don't highlight text files
+    if isinstance(lexer, TextLexer):
+        return
 
     formatter = HtmlFormatter(nowrap=True, style=style)
 
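The net effect of the changes to ``pygmentize`` is a stricter lexer-selection
policy: trust the filename first, optionally refuse to guess from file content, and
never bother highlighting plain text. A standalone sketch of that policy using plain
Pygments (``guess_filename_only`` mirrors the new ``highlightonlymatchfilename``
option; this is not the extension's actual code)::

  from pygments.lexers import TextLexer, guess_lexer, guess_lexer_for_filename
  from pygments.util import ClassNotFound

  def pick_lexer(path, text, guess_filename_only=False):
      """Return a lexer, or None when the file should not be highlighted."""
      try:
          lexer = guess_lexer_for_filename(path, text, stripnl=False)
      except (ClassNotFound, ValueError):
          if guess_filename_only:
              return None              # unknown filename pattern: skip
          try:
              lexer = guess_lexer(text, stripnl=False)
          except (ClassNotFound, ValueError):
              return None              # nothing matched at all
      if isinstance(lexer, TextLexer):
          return None                  # plain text is not worth highlighting
      return lexer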
--- a/hgext/histedit.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/histedit.py	Tue Oct 20 15:59:10 2015 -0500
@@ -38,7 +38,7 @@
  #  f, fold = use commit, but combine it with the one above
  #  r, roll = like fold, but discard this commit's description
  #  d, drop = remove commit from history
- #  m, mess = edit message without changing commit content
+ #  m, mess = edit commit message without changing commit content
  #
 
 In this file, lines beginning with ``#`` are ignored. You must specify a rule
@@ -60,7 +60,7 @@
  #  f, fold = use commit, but combine it with the one above
  #  r, roll = like fold, but discard this commit's description
  #  d, drop = remove commit from history
- #  m, mess = edit message without changing commit content
+ #  m, mess = edit commit message without changing commit content
  #
 
 At which point you close the editor and ``histedit`` starts working. When you
@@ -144,7 +144,7 @@
 repo, you can add a ``--force`` option.
 
 Histedit rule lines are truncated to 80 characters by default. You
-can customise this behaviour by setting a different length in your
+can customize this behavior by setting a different length in your
 configuration file::
 
   [histedit]
@@ -160,10 +160,10 @@
 import os
 import sys
 
+from mercurial import bundle2
 from mercurial import cmdutil
 from mercurial import discovery
 from mercurial import error
-from mercurial import changegroup
 from mercurial import copies
 from mercurial import context
 from mercurial import exchange
@@ -198,7 +198,7 @@
 #  f, fold = use commit, but combine it with the one above
 #  r, roll = like fold, but discard this commit's description
 #  d, drop = remove commit from history
-#  m, mess = edit message without changing commit content
+#  m, mess = edit commit message without changing commit content
 #
 """)
 
@@ -225,7 +225,7 @@
         except IOError as err:
             if err.errno != errno.ENOENT:
                 raise
-            raise util.Abort(_('no histedit in progress'))
+            raise error.Abort(_('no histedit in progress'))
 
         try:
             data = pickle.load(fp)
@@ -310,7 +310,11 @@
         return parentctxnode, rules, keep, topmost, replacements, backupfile
 
     def clear(self):
-        self.repo.vfs.unlink('histedit-state')
+        if self.inprogress():
+            self.repo.vfs.unlink('histedit-state')
+
+    def inprogress(self):
+        return self.repo.vfs.exists('histedit-state')
 
 class histeditaction(object):
     def __init__(self, state, node):
@@ -327,7 +331,7 @@
         try:
             node = repo[rulehash].node()
         except error.RepoError:
-            raise util.Abort(_('unknown changeset %s listed') % rulehash[:12])
+            raise error.Abort(_('unknown changeset %s listed') % rulehash[:12])
         return cls(state, node)
 
     def run(self):
@@ -406,7 +410,7 @@
     """Merge changeset from ctx (only) in the current working directory"""
     wcpar = repo.dirstate.parents()[0]
     if ctx.p1().node() == wcpar:
-        # edition ar "in place" we do not need to make any merge,
+        # edits are "in place"; we do not need to make any merge,
         # just applies changes on parent for edition
         cmdutil.revert(ui, repo, ctx, (wcpar, node.nullid), all=True)
         stats = None
@@ -435,7 +439,7 @@
         return None
     for c in ctxs:
         if not c.mutable():
-            raise util.Abort(
+            raise error.Abort(
                 _("cannot fold into public change %s") % node.short(c.node()))
     base = first.parents()[0]
 
@@ -557,8 +561,21 @@
                                middlecommits)
 
     def skipprompt(self):
+        """Returns true if the rule should skip the message editor.
+
+        For example, 'fold' wants to show an editor, but 'rollup'
+        doesn't want to.
+        """
         return False
 
+    def mergedescs(self):
+        """Returns true if the rule should merge messages of multiple changes.
+
+        This exists mainly so that 'rollup' rules can be a subclass of
+        'fold'.
+        """
+        return True
+
     def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
         parent = ctx.parents()[0].node()
         hg.update(repo, parent)
@@ -566,7 +583,7 @@
         commitopts = {}
         commitopts['user'] = ctx.user()
         # commit message
-        if self.skipprompt():
+        if not self.mergedescs():
             newmessage = ctx.description()
         else:
             newmessage = '\n***\n'.join(
@@ -601,7 +618,22 @@
             replacements.append((ich, (n,)))
         return repo[n], replacements
 
+class _multifold(fold):
+    """fold subclass used for when multiple folds happen in a row
+
+    We only want to fire the editor for the folded message once when
+    (say) four changes are folded down into a single change. This is
+    similar to rollup, but we should preserve both messages so that
+    when the last fold operation runs we can show the user all the
+    commit messages in their editor.
+    """
+    def skipprompt(self):
+        return True
+
 class rollup(fold):
+    def mergedescs(self):
+        return False
+
     def skipprompt(self):
         return True
 
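Taken together, ``skipprompt`` and ``mergedescs`` give each fold-like action its own
editor policy. An informal summary of how the classes above combine the two hooks
(not code from the changeset)::

  policies = {
      # action       (skipprompt, mergedescs)
      'fold':        (False,      True),    # open the editor, join all messages
      '_multifold':  (True,       True),    # inner fold of a chain: keep messages, no editor yet
      'roll':        (True,       False),   # no editor, discard this commit's message
  }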
@@ -614,10 +646,12 @@
     def commiteditor(self):
         return cmdutil.getcommiteditor(edit=True, editform='histedit.mess')
 
-def findoutgoing(ui, repo, remote=None, force=False, opts={}):
+def findoutgoing(ui, repo, remote=None, force=False, opts=None):
     """utility function to find the first outgoing changeset
 
-    Used by initialisation code"""
+    Used by initialization code"""
+    if opts is None:
+        opts = {}
     dest = ui.expandpath(remote or 'default-push', remote or 'default')
     dest, revs = hg.parseurl(dest, None)[:2]
     ui.status(_('comparing with %s\n') % util.hidepassword(dest))
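The ``opts={}`` -> ``opts=None`` change here (and the similar signature changes in
the largefiles and mq hunks below) avoids the classic mutable-default-argument trap:
the default dict is created once, at function definition time, and then shared by
every call. A tiny demonstration, unrelated to Mercurial::

  def bad(key, opts={}):
      opts[key] = True
      return opts

  def good(key, opts=None):
      if opts is None:
          opts = {}
      opts[key] = True
      return opts

  print(bad('a'))    # {'a': True}
  print(bad('b'))    # {'a': True, 'b': True}  <- state leaked from the first call
  print(good('a'))   # {'a': True}
  print(good('b'))   # {'b': True}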
@@ -630,12 +664,12 @@
 
     outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
     if not outgoing.missing:
-        raise util.Abort(_('no outgoing ancestors'))
+        raise error.Abort(_('no outgoing ancestors'))
     roots = list(repo.revs("roots(%ln)", outgoing.missing))
     if 1 < len(roots):
         msg = _('there are ambiguous outgoing revisions')
         hint = _('see "hg help histedit" for more detail')
-        raise util.Abort(msg, hint=hint)
+        raise error.Abort(msg, hint=hint)
     return repo.lookup(roots[0])
 
 actiontable = {'p': pick,
@@ -644,6 +678,7 @@
                'edit': edit,
                'f': fold,
                'fold': fold,
+               '_multifold': _multifold,
                'r': rollup,
                'roll': rollup,
                'd': drop,
@@ -675,9 +710,9 @@
     destination repository. If URL of the destination is omitted, the
     'default-push' (or 'default') path will be used.
 
-    For safety, this command is aborted, also if there are ambiguous
-    outgoing revisions which may confuse users: for example, there are
-    multiple branches containing outgoing revisions.
+    For safety, this command is also aborted if there are ambiguous
+    outgoing revisions which may confuse users: for example, if there
+    are multiple branches containing outgoing revisions.
 
     Use "min(outgoing() and ::.)" or similar revset specification
     instead of --outgoing to specify edit target revision exactly in
@@ -701,7 +736,7 @@
     # blanket if mq patches are applied somewhere
     mq = getattr(repo, 'mq', None)
     if mq and mq.applied:
-        raise util.Abort(_('source has mq patches applied'))
+        raise error.Abort(_('source has mq patches applied'))
 
     # basic argument incompatibility processing
     outg = opts.get('outgoing')
@@ -713,29 +748,29 @@
     revs = opts.get('rev', [])
     goal = 'new' # This invocation goal, in new, continue, abort
     if force and not outg:
-        raise util.Abort(_('--force only allowed with --outgoing'))
+        raise error.Abort(_('--force only allowed with --outgoing'))
     if cont:
         if any((outg, abort, revs, freeargs, rules, editplan)):
-            raise util.Abort(_('no arguments allowed with --continue'))
+            raise error.Abort(_('no arguments allowed with --continue'))
         goal = 'continue'
     elif abort:
         if any((outg, revs, freeargs, rules, editplan)):
-            raise util.Abort(_('no arguments allowed with --abort'))
+            raise error.Abort(_('no arguments allowed with --abort'))
         goal = 'abort'
     elif editplan:
         if any((outg, revs, freeargs)):
-            raise util.Abort(_('only --commands argument allowed with '
+            raise error.Abort(_('only --commands argument allowed with '
                                '--edit-plan'))
         goal = 'edit-plan'
     else:
         if os.path.exists(os.path.join(repo.path, 'histedit-state')):
-            raise util.Abort(_('history edit already in progress, try '
+            raise error.Abort(_('history edit already in progress, try '
                                '--continue or --abort'))
         if outg:
             if revs:
-                raise util.Abort(_('no revisions allowed with --outgoing'))
+                raise error.Abort(_('no revisions allowed with --outgoing'))
             if len(freeargs) > 1:
-                raise util.Abort(
+                raise error.Abort(
                     _('only one repo argument allowed with --outgoing'))
         else:
             revs.extend(freeargs)
@@ -745,7 +780,7 @@
                 if histeditdefault:
                     revs.append(histeditdefault)
             if len(revs) != 1:
-                raise util.Abort(
+                raise error.Abort(
                     _('histedit requires exactly one ancestor revision'))
 
 
@@ -777,30 +812,45 @@
         state.write()
         return
     elif goal == 'abort':
-        state.read()
-        mapping, tmpnodes, leafs, _ntm = processreplacement(state)
-        ui.debug('restore wc to old parent %s\n' % node.short(state.topmost))
+        try:
+            state.read()
+            tmpnodes, leafs = newnodestoabort(state)
+            ui.debug('restore wc to old parent %s\n'
+                    % node.short(state.topmost))
 
-        # Recover our old commits if necessary
-        if not state.topmost in repo and state.backupfile:
-            backupfile = repo.join(state.backupfile)
-            f = hg.openpath(ui, backupfile)
-            gen = exchange.readbundle(ui, f, backupfile)
-            changegroup.addchangegroup(repo, gen, 'histedit',
-                                       'bundle:' + backupfile)
-            os.remove(backupfile)
+            # Recover our old commits if necessary
+            if not state.topmost in repo and state.backupfile:
+                backupfile = repo.join(state.backupfile)
+                f = hg.openpath(ui, backupfile)
+                gen = exchange.readbundle(ui, f, backupfile)
+                tr = repo.transaction('histedit.abort')
+                try:
+                    if not isinstance(gen, bundle2.unbundle20):
+                        gen.apply(repo, 'histedit', 'bundle:' + backupfile)
+                    if isinstance(gen, bundle2.unbundle20):
+                        bundle2.applybundle(repo, gen, tr,
+                                            source='histedit',
+                                            url='bundle:' + backupfile)
+                    tr.close()
+                finally:
+                    tr.release()
 
-        # check whether we should update away
-        parentnodes = [c.node() for c in repo[None].parents()]
-        for n in leafs | set([state.parentctxnode]):
-            if n in parentnodes:
+                os.remove(backupfile)
+
+            # check whether we should update away
+            if repo.unfiltered().revs('parents() and (%n  or %ln::)',
+                                    state.parentctxnode, leafs | tmpnodes):
                 hg.clean(repo, state.topmost)
-                break
-        else:
-            pass
-        cleanupnode(ui, repo, 'created', tmpnodes)
-        cleanupnode(ui, repo, 'temp', leafs)
-        state.clear()
+            cleanupnode(ui, repo, 'created', tmpnodes)
+            cleanupnode(ui, repo, 'temp', leafs)
+        except Exception:
+            if state.inprogress():
+                ui.warn(_('warning: encountered an exception during histedit '
+                    '--abort; the repository may not have been completely '
+                    'cleaned up\n'))
+            raise
+        finally:
+            state.clear()
         return
     else:
         cmdutil.checkunfinished(repo)
@@ -816,13 +866,13 @@
         else:
             rr = list(repo.set('roots(%ld)', scmutil.revrange(repo, revs)))
             if len(rr) != 1:
-                raise util.Abort(_('The specified revisions must have '
+                raise error.Abort(_('The specified revisions must have '
                     'exactly one common root'))
             root = rr[0].node()
 
         revs = between(repo, root, topmost, state.keep)
         if not revs:
-            raise util.Abort(_('%s is not an ancestor of working directory') %
+            raise error.Abort(_('%s is not an ancestor of working directory') %
                              node.short(root))
 
         ctxs = [repo[r] for r in revs]
@@ -854,6 +904,14 @@
                                         'histedit')
         state.backupfile = backupfile
 
+    # preprocess rules so that we can hide inner folds from the user
+    # and only show one editor
+    rules = state.rules[:]
+    for idx, ((action, ha), (nextact, unused)) in enumerate(
+            zip(rules, rules[1:] + [(None, None)])):
+        if action == 'fold' and nextact == 'fold':
+            state.rules[idx] = '_multifold', ha
+
     while state.rules:
         state.write()
         action, ha = state.rules.pop(0)
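The preprocessing loop above rewrites every ``fold`` that is immediately followed by
another ``fold`` into the internal ``_multifold`` action, so that only the last fold
of a run opens the editor. The same loop rendered as a standalone function over
simplified ``(action, hash)`` tuples (hashes are made up)::

  def preprocess(rules):
      rules = list(rules)
      for idx, ((action, ha), (nextact, _nextha)) in enumerate(
              zip(rules, rules[1:] + [(None, None)])):
          if action == 'fold' and nextact == 'fold':
              rules[idx] = ('_multifold', ha)
      return rules

  assert preprocess([('fold', 'a'), ('fold', 'b'), ('fold', 'c'), ('pick', 'd')]) == \
      [('_multifold', 'a'), ('_multifold', 'b'), ('fold', 'c'), ('pick', 'd')]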
@@ -879,6 +937,13 @@
                     for n in succs[1:]:
                         ui.debug(m % node.short(n))
 
+    if supportsmarkers:
+        # Only create markers if the temp nodes weren't already removed.
+        obsolete.createmarkers(repo, ((repo[t],()) for t in sorted(tmpnodes)
+                                       if t in repo))
+    else:
+        cleanupnode(ui, repo, 'temp', tmpnodes)
+
     if not state.keep:
         if mapping:
             movebookmarks(ui, repo, mapping, state.topmost, ntm)
@@ -895,7 +960,6 @@
         else:
             cleanupnode(ui, repo, 'replaced', mapping)
 
-    cleanupnode(ui, repo, 'temp', tmpnodes)
     state.clear()
     if os.path.exists(repo.sjoin('undo')):
         os.unlink(repo.sjoin('undo'))
@@ -912,7 +976,7 @@
             actobj.continuedirty()
             s = repo.status()
             if s.modified or s.added or s.removed or s.deleted:
-                raise util.Abort(_("working copy still dirty"))
+                raise error.Abort(_("working copy still dirty"))
 
         parentctx, replacements = actobj.continueclean()
 
@@ -929,12 +993,12 @@
     if ctxs and not keep:
         if (not obsolete.isenabled(repo, obsolete.allowunstableopt) and
             repo.revs('(%ld::) - (%ld)', ctxs, ctxs)):
-            raise util.Abort(_('cannot edit history that would orphan nodes'))
+            raise error.Abort(_('cannot edit history that would orphan nodes'))
         if repo.revs('(%ld) and merge()', ctxs):
-            raise util.Abort(_('cannot edit history that contains merges'))
+            raise error.Abort(_('cannot edit history that contains merges'))
         root = ctxs[0] # list is already sorted by repo.set
         if not root.mutable():
-            raise util.Abort(_('cannot edit public changeset: %s') % root,
+            raise error.Abort(_('cannot edit public changeset: %s') % root,
                              hint=_('see "hg help phases" for details'))
     return [c.node() for c in ctxs]
 
@@ -985,30 +1049,49 @@
     seen = set()
     for r in rules:
         if ' ' not in r:
-            raise util.Abort(_('malformed line "%s"') % r)
+            raise error.Abort(_('malformed line "%s"') % r)
         action, rest = r.split(' ', 1)
         ha = rest.strip().split(' ', 1)[0]
         try:
             ha = repo[ha].hex()
         except error.RepoError:
-            raise util.Abort(_('unknown changeset %s listed') % ha[:12])
+            raise error.Abort(_('unknown changeset %s listed') % ha[:12])
         if ha not in expected:
-            raise util.Abort(
+            raise error.Abort(
                 _('may not use changesets other than the ones listed'))
         if ha in seen:
-            raise util.Abort(_('duplicated command for changeset %s') %
+            raise error.Abort(_('duplicated command for changeset %s') %
                     ha[:12])
         seen.add(ha)
-        if action not in actiontable:
-            raise util.Abort(_('unknown action "%s"') % action)
+        if action not in actiontable or action.startswith('_'):
+            raise error.Abort(_('unknown action "%s"') % action)
         parsed.append([action, ha])
     missing = sorted(expected - seen)  # sort to stabilize output
     if missing:
-        raise util.Abort(_('missing rules for changeset %s') %
+        raise error.Abort(_('missing rules for changeset %s') %
                 missing[0][:12],
                 hint=_('do you want to use the drop action?'))
     return parsed
 
+def newnodestoabort(state):
+    """process the list of replacements to return
+
+    1) the list of final nodes
+    2) the list of temporary nodes
+
+    This is meant to be used on abort, as less data is required in this case.
+    """
+    replacements = state.replacements
+    allsuccs = set()
+    replaced = set()
+    for rep in replacements:
+        allsuccs.update(rep[1])
+        replaced.add(rep[0])
+    newnodes = allsuccs - replaced
+    tmpnodes = allsuccs & replaced
+    return newnodes, tmpnodes
+
+
 def processreplacement(state):
     """process the list of replacements to return
 
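A made-up worked example of the set arithmetic in ``newnodestoabort``: successors
that were themselves replaced again are temporary nodes, and the remaining successors
are the new final nodes (node names below are placeholders)::

  replacements = [('orig', ('tmp', 'final1')),   # orig was rewritten into tmp + final1
                  ('tmp', ('final2',))]          # tmp was itself rewritten again
  allsuccs = {'tmp', 'final1', 'final2'}
  replaced = {'orig', 'tmp'}
  newnodes = allsuccs - replaced                 # {'final1', 'final2'}
  tmpnodes = allsuccs & replaced                 # {'tmp'}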
@@ -1019,15 +1102,15 @@
     allsuccs = set()
     replaced = set()
     fullmapping = {}
-    # initialise basic set
-    # fullmapping record all operation recorded in replacement
+    # initialize basic set
+    # fullmapping records all operations recorded in replacement
     for rep in replacements:
         allsuccs.update(rep[1])
         replaced.add(rep[0])
         fullmapping.setdefault(rep[0], set()).update(rep[1])
     new = allsuccs - replaced
     tmpnodes = allsuccs & replaced
-    # Reduce content fullmapping  into direct relation between original nodes
+    # Reduce content fullmapping into direct relation between original nodes
     # and final node created during history edition
     # Dropped changeset are replaced by an empty list
     toproceed = set(fullmapping)
@@ -1113,8 +1196,12 @@
     lock = None
     try:
         lock = repo.lock()
-        # Find all node that need to be stripped
-        # (we hg %lr instead of %ln to silently ignore unknown item
+        # do not let filtering get in the way of the cleanse
+        # we should probably get rid of obsolescence markers created during the
+        # histedit, but we currently do not have such information.
+        repo = repo.unfiltered()
+        # Find all nodes that need to be stripped
+        # (we use %lr instead of %ln to silently ignore unknown items)
         nm = repo.changelog.nodemap
         nodes = sorted(n for n in nodes if n in nm)
         roots = [c.node() for c in repo.set("roots(%ln)", nodes)]
@@ -1137,7 +1224,7 @@
         strip_nodes = set([repo[n].node() for n in nodelist])
         common_nodes = histedit_nodes & strip_nodes
         if common_nodes:
-            raise util.Abort(_("histedit in progress, can't strip %s")
+            raise error.Abort(_("histedit in progress, can't strip %s")
                              % ', '.join(node.short(x) for x in common_nodes))
     return orig(ui, repo, nodelist, *args, **kwargs)
 
--- a/hgext/keyword.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/keyword.py	Tue Oct 20 15:59:10 2015 -0500
@@ -15,7 +15,7 @@
 # audience not running a version control system.
 #
 # For in-depth discussion refer to
-# <http://mercurial.selenic.com/wiki/KeywordPlan>.
+# <https://mercurial-scm.org/wiki/KeywordPlan>.
 #
 # Keyword expansion is based on Mercurial's changeset template mappings.
 #
@@ -83,7 +83,7 @@
 '''
 
 from mercurial import commands, context, cmdutil, dispatch, filelog, extensions
-from mercurial import localrepo, match, patch, templatefilters, util
+from mercurial import localrepo, match, patch, templatefilters, util, error
 from mercurial import scmutil, pathutil
 from mercurial.hgweb import webcommands
 from mercurial.i18n import _
@@ -348,20 +348,20 @@
         return repo.status(match=scmutil.match(wctx, pats, opts), clean=True,
                            unknown=opts.get('unknown') or opts.get('all'))
     if ui.configitems('keyword'):
-        raise util.Abort(_('[keyword] patterns cannot match'))
-    raise util.Abort(_('no [keyword] patterns configured'))
+        raise error.Abort(_('[keyword] patterns cannot match'))
+    raise error.Abort(_('no [keyword] patterns configured'))
 
 def _kwfwrite(ui, repo, expand, *pats, **opts):
     '''Selects files and passes them to kwtemplater.overwrite.'''
     wctx = repo[None]
     if len(wctx.parents()) > 1:
-        raise util.Abort(_('outstanding uncommitted merge'))
+        raise error.Abort(_('outstanding uncommitted merge'))
     kwt = kwtools['templater']
     wlock = repo.wlock()
     try:
         status = _status(ui, repo, wctx, kwt, *pats, **opts)
         if status.modified or status.added or status.removed or status.deleted:
-            raise util.Abort(_('outstanding uncommitted changes'))
+            raise error.Abort(_('outstanding uncommitted changes'))
         kwt.overwrite(wctx, status.clean, True, expand)
     finally:
         wlock.release()
@@ -623,6 +623,7 @@
 
         def rollback(self, dryrun=False, force=False):
             wlock = self.wlock()
+            origrestrict = kwt.restrict
             try:
                 if not dryrun:
                     changed = self['.'].files()
@@ -630,10 +631,12 @@
                 if not dryrun:
                     ctx = self['.']
                     modified, added = _preselect(ctx.status(), changed)
+                    kwt.restrict = False
                     kwt.overwrite(ctx, modified, True, True)
                     kwt.overwrite(ctx, added, True, False)
                 return ret
             finally:
+                kwt.restrict = origrestrict
                 wlock.release()
 
     # monkeypatches
--- a/hgext/largefiles/basestore.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/largefiles/basestore.py	Tue Oct 20 15:59:10 2015 -0500
@@ -10,7 +10,7 @@
 
 import re
 
-from mercurial import util, node, hg
+from mercurial import util, node, hg, error
 from mercurial.i18n import _
 
 import lfutil
@@ -209,7 +209,7 @@
     try:
         storeproviders = _storeprovider[scheme]
     except KeyError:
-        raise util.Abort(_('unsupported URL scheme %r') % scheme)
+        raise error.Abort(_('unsupported URL scheme %r') % scheme)
 
     for classobj in storeproviders:
         try:
@@ -217,5 +217,5 @@
         except lfutil.storeprotonotcapable:
             pass
 
-    raise util.Abort(_('%s does not appear to be a largefile store') %
+    raise error.Abort(_('%s does not appear to be a largefile store') %
                      util.hidepassword(path))
--- a/hgext/largefiles/lfcommands.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/largefiles/lfcommands.py	Tue Oct 20 15:59:10 2015 -0500
@@ -62,9 +62,9 @@
         size = lfutil.getminsize(ui, True, opts.get('size'), default=None)
 
     if not hg.islocal(src):
-        raise util.Abort(_('%s is not a local Mercurial repo') % src)
+        raise error.Abort(_('%s is not a local Mercurial repo') % src)
     if not hg.islocal(dest):
-        raise util.Abort(_('%s is not a local Mercurial repo') % dest)
+        raise error.Abort(_('%s is not a local Mercurial repo') % dest)
 
     rsrc = hg.repository(ui, src)
     ui.status(_('initializing destination %s\n') % dest)
@@ -139,7 +139,7 @@
                     path = lfutil.findfile(rsrc, hash)
 
                     if path is None:
-                        raise util.Abort(_("missing largefile for \'%s\' in %s")
+                        raise error.Abort(_("missing largefile for '%s' in %s")
                                           % (realname, realrev))
                     fp = open(path, 'rb')
 
@@ -157,7 +157,7 @@
 
             found, missing = downloadlfiles(ui, rsrc)
             if missing != 0:
-                raise util.Abort(_("all largefiles must be present locally"))
+                raise error.Abort(_("all largefiles must be present locally"))
 
             orig = convcmd.converter
             convcmd.converter = converter
@@ -196,7 +196,7 @@
                 islfile |= renamedlfile
                 if 'l' in fctx.flags():
                     if renamedlfile:
-                        raise util.Abort(
+                        raise error.Abort(
                             _('renamed/copied largefile %s becomes symlink')
                             % f)
                     islfile = False
@@ -213,7 +213,7 @@
                 if 'l' in fctx.flags():
                     renamed = fctx.renamed()
                     if renamed and renamed[0] in lfiles:
-                        raise util.Abort(_('largefile %s becomes symlink') % f)
+                        raise error.Abort(_('largefile %s becomes symlink') % f)
 
                 # largefile was modified, update standins
                 m = util.sha1('')
@@ -355,7 +355,7 @@
                     total=len(files))
         source = lfutil.findfile(rsrc, hash)
         if not source:
-            raise util.Abort(_('largefile %s missing from store'
+            raise error.Abort(_('largefile %s missing from store'
                                ' (needs to be uploaded)') % hash)
         # XXX check for errors here
         store.put(source, hash)
@@ -539,7 +539,7 @@
 
     revs = opts.get('rev', [])
     if not revs:
-        raise util.Abort(_('no revisions specified'))
+        raise error.Abort(_('no revisions specified'))
     revs = scmutil.revrange(repo, revs)
 
     numcached = 0
--- a/hgext/largefiles/lfutil.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/largefiles/lfutil.py	Tue Oct 20 15:59:10 2015 -0500
@@ -16,7 +16,7 @@
 
 from mercurial import dirstate, httpconnection, match as match_, util, scmutil
 from mercurial.i18n import _
-from mercurial import node
+from mercurial import node, error
 
 shortname = '.hglf'
 shortnameslash = shortname + '/'
@@ -33,10 +33,10 @@
         try:
             lfsize = float(lfsize)
         except ValueError:
-            raise util.Abort(_('largefiles: size must be number (not %s)\n')
+            raise error.Abort(_('largefiles: size must be number (not %s)\n')
                              % lfsize)
     if lfsize is None:
-        raise util.Abort(_('minimum size for largefiles must be specified'))
+        raise error.Abort(_('minimum size for largefiles must be specified'))
     return lfsize
 
 def link(src, dest):
@@ -74,7 +74,7 @@
                 if home:
                     path = os.path.join(home, '.cache', longname, hash)
         else:
-            raise util.Abort(_('unknown operating system: %s\n') % os.name)
+            raise error.Abort(_('unknown operating system: %s\n') % os.name)
     return path
 
 def inusercache(ui, hash):
@@ -110,6 +110,11 @@
         return super(largefilesdirstate, self).normallookup(unixpath(f))
     def _ignore(self, f):
         return False
+    def write(self, tr=False):
+        # (1) disable PENDING mode always
+        #     (lfdirstate isn't yet managed as a part of the transaction)
+        # (2) avoid develwarn 'use dirstate.write with ....'
+        super(largefilesdirstate, self).write(None)
 
 def openlfdirstate(ui, repo, create=True):
     '''
@@ -399,7 +404,8 @@
     else:
         state, mtime = '?', -1
     if state == 'n':
-        if normallookup or mtime < 0:
+        if (normallookup or mtime < 0 or
+            not os.path.exists(repo.wjoin(lfile))):
             # state 'n' doesn't ensure 'clean' in this case
             lfdirstate.normallookup(lfile)
         else:
--- a/hgext/largefiles/overrides.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/largefiles/overrides.py	Tue Oct 20 15:59:10 2015 -0500
@@ -12,7 +12,7 @@
 import copy
 
 from mercurial import hg, util, cmdutil, scmutil, match as match_, \
-        archival, pathutil, revset
+        archival, pathutil, revset, error
 from mercurial.i18n import _
 
 import lfutil
@@ -50,8 +50,10 @@
 
 def installnormalfilesmatchfn(manifest):
     '''installmatchfn with a matchfn that ignores all largefiles'''
-    def overridematch(ctx, pats=[], opts={}, globbed=False,
+    def overridematch(ctx, pats=(), opts=None, globbed=False,
             default='relpath', badfn=None):
+        if opts is None:
+            opts = {}
         match = oldmatch(ctx, pats, opts, globbed, default, badfn=badfn)
         return composenormalfilematcher(match, manifest)
     oldmatch = installmatchfn(overridematch)
@@ -243,7 +245,7 @@
 
 def overrideadd(orig, ui, repo, *pats, **opts):
     if opts.get('normal') and opts.get('large'):
-        raise util.Abort(_('--normal cannot be used with --large'))
+        raise error.Abort(_('--normal cannot be used with --large'))
     return orig(ui, repo, *pats, **opts)
 
 def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts):
@@ -287,13 +289,15 @@
         repo._repo.lfstatus = False
 
 def overridelog(orig, ui, repo, *pats, **opts):
-    def overridematchandpats(ctx, pats=[], opts={}, globbed=False,
+    def overridematchandpats(ctx, pats=(), opts=None, globbed=False,
             default='relpath', badfn=None):
         """Matcher that merges root directory with .hglf, suitable for log.
         It is still possible to match .hglf directly.
         For any listed files run log on the standin too.
         matchfn tries both the given filename and with .hglf stripped.
         """
+        if opts is None:
+            opts = {}
         matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default,
                                        badfn=badfn)
         m, p = copy.copy(matchandpats)
@@ -320,7 +324,7 @@
             back = util.pconvert(m.rel(hglf)[:-len(hglf)])
 
             def tostandin(f):
-                # The file may already be a standin, so trucate the back
+                # The file may already be a standin, so truncate the back
                 # prefix and test before mangling it.  This avoids turning
                 # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
                 if f.startswith(back) and lfutil.splitstandin(f[len(back):]):
@@ -532,9 +536,11 @@
 
 # Override filemerge to prompt the user about how they wish to merge
 # largefiles. This will handle identical edits without prompting the user.
-def overridefilemerge(origfn, repo, mynode, orig, fcd, fco, fca, labels=None):
+def overridefilemerge(origfn, premerge, repo, mynode, orig, fcd, fco, fca,
+                      labels=None):
     if not lfutil.isstandin(orig):
-        return origfn(repo, mynode, orig, fcd, fco, fca, labels=labels)
+        return origfn(premerge, repo, mynode, orig, fcd, fco, fca,
+                      labels=labels)
 
     ahash = fca.data().strip().lower()
     dhash = fcd.data().strip().lower()
@@ -549,7 +555,7 @@
                (lfutil.splitstandin(orig), ahash, dhash, ohash),
              0) == 1)):
         repo.wwrite(fcd.path(), fco.data(), fco.flags())
-    return 0
+    return True, 0
 
 def copiespathcopies(orig, ctx1, ctx2, match=None):
     copies = orig(ctx1, ctx2, match=match)
@@ -580,7 +586,7 @@
     installnormalfilesmatchfn(repo[None].manifest())
     try:
         result = orig(ui, repo, pats, opts, rename)
-    except util.Abort as e:
+    except error.Abort as e:
         if str(e) != _('no files to copy'):
             raise e
         else:
@@ -613,8 +619,10 @@
         wlock = repo.wlock()
 
         manifest = repo[None].manifest()
-        def overridematch(ctx, pats=[], opts={}, globbed=False,
+        def overridematch(ctx, pats=(), opts=None, globbed=False,
                 default='relpath', badfn=None):
+            if opts is None:
+                opts = {}
             newpats = []
             # The patterns were previously mangled to add the standin
             # directory; we need to remove that now
@@ -682,7 +690,7 @@
 
                 lfdirstate.add(destlfile)
         lfdirstate.write()
-    except util.Abort as e:
+    except error.Abort as e:
         if str(e) != _('no files to copy'):
             raise e
         else:
@@ -692,7 +700,7 @@
         wlock.release()
 
     if nolfiles and nonormalfiles:
-        raise util.Abort(_('no files to copy'))
+        raise error.Abort(_('no files to copy'))
 
     return result
 
@@ -722,8 +730,10 @@
 
         oldstandins = lfutil.getstandinsstate(repo)
 
-        def overridematch(mctx, pats=[], opts={}, globbed=False,
+        def overridematch(mctx, pats=(), opts=None, globbed=False,
                 default='relpath', badfn=None):
+            if opts is None:
+                opts = {}
             match = oldmatch(mctx, pats, opts, globbed, default, badfn=badfn)
             m = copy.copy(match)
 
@@ -819,7 +829,7 @@
     try:
         firstpulled = repo.firstpulled
     except AttributeError:
-        raise util.Abort(_("pulled() only available in --lfrev"))
+        raise error.Abort(_("pulled() only available in --lfrev"))
     return revset.baseset([r for r in subset if r >= firstpulled])
 
 def overrideclone(orig, ui, source, dest=None, **opts):
@@ -827,7 +837,7 @@
     if d is None:
         d = hg.defaultdest(source)
     if opts.get('all_largefiles') and not hg.islocal(d):
-            raise util.Abort(_(
+            raise error.Abort(_(
             '--all-largefiles is incompatible with non-local destination %s') %
             d)
 
@@ -908,13 +918,13 @@
         lfcommands.cachelfiles(repo.ui, repo, node)
 
     if kind not in archival.archivers:
-        raise util.Abort(_("unknown archive type '%s'") % kind)
+        raise error.Abort(_("unknown archive type '%s'") % kind)
 
     ctx = repo[node]
 
     if kind == 'files':
         if prefix:
-            raise util.Abort(
+            raise error.Abort(
                 _('cannot give prefix when archiving to files'))
     else:
         prefix = archival.tidyprefix(dest, kind, prefix)
@@ -941,7 +951,7 @@
                 path = lfutil.findfile(repo, getdata().strip())
 
                 if path is None:
-                    raise util.Abort(
+                    raise error.Abort(
                        _('largefile %s not found in repo store or system cache')
                        % lfutil.splitstandin(f))
             else:
@@ -998,7 +1008,7 @@
                 path = lfutil.findfile(repo._repo, getdata().strip())
 
                 if path is None:
-                    raise util.Abort(
+                    raise error.Abort(
                        _('largefile %s not found in repo store or system cache')
                        % lfutil.splitstandin(f))
             else:
@@ -1035,7 +1045,7 @@
     s = repo.status()
     repo.lfstatus = False
     if s.modified or s.added or s.removed or s.deleted:
-        raise util.Abort(_('uncommitted changes'))
+        raise error.Abort(_('uncommitted changes'))
 
 def cmdutilforget(orig, ui, repo, match, prefix, explicitonly):
     normalmatcher = composenormalfilematcher(match, repo[None].manifest())
@@ -1174,8 +1184,10 @@
     finally:
         repo.lfstatus = False
 
-def scmutiladdremove(orig, repo, matcher, prefix, opts={}, dry_run=None,
+def scmutiladdremove(orig, repo, matcher, prefix, opts=None, dry_run=None,
                      similarity=None):
+    if opts is None:
+        opts = {}
     if not lfutil.islfilesrepo(repo):
         return orig(repo, matcher, prefix, opts, dry_run, similarity)
     # Get the list of missing largefiles so we can remove them
@@ -1334,7 +1346,7 @@
                 store = basestore._openstore(repo)
                 success, missing = store.get([(lf, hash)])
                 if len(success) != 1:
-                    raise util.Abort(
+                    raise error.Abort(
                         _('largefile %s is not in cache and could not be '
                           'downloaded')  % lf)
             path = lfutil.usercachepath(repo.ui, hash)
--- a/hgext/largefiles/proto.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/largefiles/proto.py	Tue Oct 20 15:59:10 2015 -0500
@@ -51,7 +51,8 @@
     cache.'''
     filename = lfutil.findfile(repo, sha)
     if not filename:
-        raise util.Abort(_('requested largefile %s not present in cache') % sha)
+        raise error.Abort(_('requested largefile %s not present in cache')
+                          % sha)
     f = open(filename, 'rb')
     length = os.fstat(f.fileno())[6]
 
--- a/hgext/largefiles/remotestore.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/largefiles/remotestore.py	Tue Oct 20 15:59:10 2015 -0500
@@ -8,7 +8,7 @@
 
 import urllib2
 
-from mercurial import util, wireproto
+from mercurial import util, wireproto, error
 from mercurial.i18n import _
 
 import lfutil
@@ -21,7 +21,7 @@
 
     def put(self, source, hash):
         if self.sendfile(source, hash):
-            raise util.Abort(
+            raise error.Abort(
                 _('remotestore: could not put %s to remote store %s')
                 % (source, util.hidepassword(self.url)))
         self.ui.debug(
@@ -39,7 +39,7 @@
             fd = lfutil.httpsendfile(self.ui, filename)
             return self._put(hash, fd)
         except IOError as e:
-            raise util.Abort(
+            raise error.Abort(
                 _('remotestore: could not open file %s: %s')
                 % (filename, str(e)))
         finally:
@@ -50,14 +50,14 @@
         try:
             chunks = self._get(hash)
         except urllib2.HTTPError as e:
-            # 401s get converted to util.Aborts; everything else is fine being
+            # 401s get converted to error.Aborts; everything else is fine being
             # turned into a StoreError
             raise basestore.StoreError(filename, hash, self.url, str(e))
         except urllib2.URLError as e:
             # This usually indicates a connection problem, so don't
             # keep trying with the other files... they will probably
             # all fail too.
-            raise util.Abort('%s: %s' %
+            raise error.Abort('%s: %s' %
                              (util.hidepassword(self.url), e.reason))
         except IOError as e:
             raise basestore.StoreError(filename, hash, self.url, str(e))
--- a/hgext/largefiles/reposetup.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/largefiles/reposetup.py	Tue Oct 20 15:59:10 2015 -0500
@@ -10,7 +10,7 @@
 import copy
 import os
 
-from mercurial import error, match as match_, util
+from mercurial import error, match as match_
 from mercurial.i18n import _
 from mercurial import scmutil, localrepo
 
@@ -280,7 +280,7 @@
                     msg = _("required features are not"
                             " supported in the destination:"
                             " %s") % (', '.join(sorted(missing)))
-                    raise util.Abort(msg)
+                    raise error.Abort(msg)
             return super(lfilesrepo, self).push(remote, force=force, revs=revs,
                 newbranch=newbranch)
 
@@ -304,7 +304,7 @@
 
             for f in files:
                 if lfutil.isstandin(f + '/'):
-                    raise util.Abort(
+                    raise error.Abort(
                         _('file "%s" is a largefile standin') % f,
                         hint=('commit the largefile itself instead'))
                 # Scan directories
--- a/hgext/largefiles/uisetup.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/largefiles/uisetup.py	Tue Oct 20 15:59:10 2015 -0500
@@ -102,7 +102,7 @@
                                     overrides.mergerecordupdates)
     entry = extensions.wrapfunction(merge, 'update',
                                     overrides.mergeupdate)
-    entry = extensions.wrapfunction(filemerge, 'filemerge',
+    entry = extensions.wrapfunction(filemerge, '_filemerge',
                                     overrides.overridefilemerge)
     entry = extensions.wrapfunction(cmdutil, 'copy',
                                     overrides.overridecopy)
--- a/hgext/mq.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/mq.py	Tue Oct 20 15:59:10 2015 -0500
@@ -28,7 +28,7 @@
 
 By default, mq will automatically use git patches when required to
 avoid losing file mode changes, copy records, binary files or empty
-files creations or deletions. This behaviour can be configured with::
+files creations or deletions. This behavior can be configured with::
 
   [mq]
   git = auto/keep/yes/no
@@ -395,6 +395,17 @@
 class AbortNoCleanup(error.Abort):
     pass
 
+def makepatchname(existing, title):
+    """Return a suitable filename for title, adding a suffix to make
+    it unique in the existing list"""
+    namebase = re.sub('[\s\W_]+', '_', title.lower()).strip('_')
+    name = namebase
+    i = 0
+    while name in existing:
+        i += 1
+        name = '%s__%s' % (namebase, i)
+    return name
+
 class queue(object):
     def __init__(self, ui, baseui, path, patchdir=None):
         self.basepath = path
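
makepatchname is what lets qimport, further down in this file, derive patch names from the first line of a changeset description instead of the old '<rev>.diff' scheme. A quick illustration of its behavior, assuming hgext.mq is importable; the titles and existing names are made up:

    from hgext.mq import makepatchname

    existing = ['fix_the_build', 'fix_the_build__1']
    makepatchname(existing, 'Fix the build!')     # -> 'fix_the_build__2'
    makepatchname(existing, 'Add release notes')  # -> 'add_release_notes'
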
@@ -483,7 +494,7 @@
         self.guardsdirty = False
         self.activeguards = None
 
-    def diffopts(self, opts={}, patchfn=None):
+    def diffopts(self, opts=None, patchfn=None):
         diffopts = patchmod.diffopts(self.ui, opts)
         if self.gitmode == 'auto':
             diffopts.upgrade = True
@@ -492,7 +503,7 @@
         elif self.gitmode in ('yes', 'no'):
             diffopts.git = self.gitmode == 'yes'
         else:
-            raise util.Abort(_('mq.git option can be auto/keep/yes/no'
+            raise error.Abort(_('mq.git option can be auto/keep/yes/no'
                                ' got %s') % self.gitmode)
         if patchfn:
             diffopts = self.patchopts(diffopts, patchfn)
@@ -544,7 +555,7 @@
             patch = patch.strip()
             if patch:
                 if patch in self.series:
-                    raise util.Abort(_('%s appears more than once in %s') %
+                    raise error.Abort(_('%s appears more than once in %s') %
                                      (patch, self.join(self.seriespath)))
                 self.series.append(patch)
                 self.seriesguards.append(self.guard_re.findall(comment))
@@ -565,7 +576,7 @@
         for guard in guards:
             bad = self.checkguard(guard)
             if bad:
-                raise util.Abort(bad)
+                raise error.Abort(bad)
         guards = sorted(set(guards))
         self.ui.debug('active guards: %s\n' % ' '.join(guards))
         self.activeguards = guards
@@ -592,12 +603,12 @@
     def setguards(self, idx, guards):
         for g in guards:
             if len(g) < 2:
-                raise util.Abort(_('guard %r too short') % g)
+                raise error.Abort(_('guard %r too short') % g)
             if g[0] not in '-+':
-                raise util.Abort(_('guard %r starts with invalid char') % g)
+                raise error.Abort(_('guard %r starts with invalid char') % g)
             bad = self.checkguard(g[1:])
             if bad:
-                raise util.Abort(bad)
+                raise error.Abort(bad)
         drop = self.guard_re.sub('', self.fullseries[idx])
         self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
         self.parseseries()
@@ -708,7 +719,7 @@
             return (err, n)
 
         if n is None:
-            raise util.Abort(_("apply failed for patch %s") % patch)
+            raise error.Abort(_("apply failed for patch %s") % patch)
 
         self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
 
@@ -719,14 +730,14 @@
         ctx = repo[rev]
         ret = hg.merge(repo, rev)
         if ret:
-            raise util.Abort(_("update returned %d") % ret)
+            raise error.Abort(_("update returned %d") % ret)
         n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
         if n is None:
-            raise util.Abort(_("repo commit failed"))
+            raise error.Abort(_("repo commit failed"))
         try:
             ph = patchheader(mergeq.join(patch), self.plainmode)
         except Exception:
-            raise util.Abort(_("unable to read %s") % patch)
+            raise error.Abort(_("unable to read %s") % patch)
 
         diffopts = self.patchopts(diffopts, patch)
         patchf = self.opener(patch, "w")
@@ -816,10 +827,9 @@
     def apply(self, repo, series, list=False, update_status=True,
               strict=False, patchdir=None, merge=None, all_files=None,
               tobackup=None, keepchanges=False):
-        wlock = dsguard = lock = tr = None
+        wlock = lock = tr = None
         try:
             wlock = repo.wlock()
-            dsguard = cmdutil.dirstateguard(repo, 'mq.apply')
             lock = repo.lock()
             tr = repo.transaction("qpush")
             try:
@@ -828,12 +838,10 @@
                                   tobackup=tobackup, keepchanges=keepchanges)
                 tr.close()
                 self.savedirty()
-                dsguard.close()
                 return ret
             except AbortNoCleanup:
                 tr.close()
                 self.savedirty()
-                dsguard.close()
                 raise
             except: # re-raises
                 try:
@@ -843,7 +851,7 @@
                     self.invalidate()
                 raise
         finally:
-            release(tr, lock, dsguard, wlock)
+            release(tr, lock, wlock)
             self.removeundo(repo)
 
     def _apply(self, repo, series, list=False, update_status=True,
@@ -934,9 +942,9 @@
             n = newcommit(repo, None, message, ph.user, ph.date, match=match,
                           force=True)
             if repo['tip'] == oldtip:
-                raise util.Abort(_("qpush exactly duplicates child changeset"))
+                raise error.Abort(_("qpush exactly duplicates child changeset"))
             if n is None:
-                raise util.Abort(_("repository commit failed"))
+                raise error.Abort(_("repository commit failed"))
 
             if update_status:
                 self.applied.append(statusentry(n, patchname))
@@ -988,7 +996,7 @@
                     self.ui.warn(msg % (short(rev[p]), p))
             else:
                 msg = _('unknown patches: %s\n')
-                raise util.Abort(''.join(msg % p for p in unknown))
+                raise error.Abort(''.join(msg % p for p in unknown))
 
         self.parseseries()
         self.seriesdirty = True
@@ -1000,13 +1008,13 @@
         for i, rev in enumerate(revs):
 
             if rev < firstrev:
-                raise util.Abort(_('revision %d is not managed') % rev)
+                raise error.Abort(_('revision %d is not managed') % rev)
 
             ctx = repo[rev]
             base = self.applied[i].node
             if ctx.node() != base:
                 msg = _('cannot delete revision %d above applied patches')
-                raise util.Abort(msg % rev)
+                raise error.Abort(msg % rev)
 
             patch = self.applied[i].name
             for fmt in ('[mq]: %s', 'imported patch %s'):
@@ -1038,7 +1046,7 @@
 
     def delete(self, repo, patches, opts):
         if not patches and not opts.get('rev'):
-            raise util.Abort(_('qdelete requires at least one revision or '
+            raise error.Abort(_('qdelete requires at least one revision or '
                                'patch name'))
 
         realpatches = []
@@ -1046,16 +1054,16 @@
             patch = self.lookup(patch, strict=True)
             info = self.isapplied(patch)
             if info:
-                raise util.Abort(_("cannot delete applied patch %s") % patch)
+                raise error.Abort(_("cannot delete applied patch %s") % patch)
             if patch not in self.series:
-                raise util.Abort(_("patch %s not in series file") % patch)
+                raise error.Abort(_("patch %s not in series file") % patch)
             if patch not in realpatches:
                 realpatches.append(patch)
 
         numrevs = 0
         if opts.get('rev'):
             if not self.applied:
-                raise util.Abort(_('no patches applied'))
+                raise error.Abort(_('no patches applied'))
             revs = scmutil.revrange(repo, opts.get('rev'))
             revs.sort()
             revpatches = self._revpatches(repo, revs)
@@ -1070,7 +1078,7 @@
             top = self.applied[-1].node
             patch = self.applied[-1].name
             if repo.dirstate.p1() != top:
-                raise util.Abort(_("working directory revision is not qtip"))
+                raise error.Abort(_("working directory revision is not qtip"))
             return top, patch
         return None, None
 
@@ -1089,38 +1097,38 @@
     def checklocalchanges(self, repo, force=False, refresh=True):
         excsuffix = ''
         if refresh:
-            excsuffix = ', refresh first'
+            excsuffix = ', qrefresh first'
             # plain versions for i18n tool to detect them
-            _("local changes found, refresh first")
-            _("local changed subrepos found, refresh first")
+            _("local changes found, qrefresh first")
+            _("local changed subrepos found, qrefresh first")
         return checklocalchanges(repo, force, excsuffix)
 
     _reserved = ('series', 'status', 'guards', '.', '..')
     def checkreservedname(self, name):
         if name in self._reserved:
-            raise util.Abort(_('"%s" cannot be used as the name of a patch')
+            raise error.Abort(_('"%s" cannot be used as the name of a patch')
                              % name)
         for prefix in ('.hg', '.mq'):
             if name.startswith(prefix):
-                raise util.Abort(_('patch name cannot begin with "%s"')
+                raise error.Abort(_('patch name cannot begin with "%s"')
                                  % prefix)
         for c in ('#', ':', '\r', '\n'):
             if c in name:
-                raise util.Abort(_('%r cannot be used in the name of a patch')
+                raise error.Abort(_('%r cannot be used in the name of a patch')
                                  % c)
 
     def checkpatchname(self, name, force=False):
         self.checkreservedname(name)
         if not force and os.path.exists(self.join(name)):
             if os.path.isdir(self.join(name)):
-                raise util.Abort(_('"%s" already exists as a directory')
+                raise error.Abort(_('"%s" already exists as a directory')
                                  % name)
             else:
-                raise util.Abort(_('patch "%s" already exists') % name)
+                raise error.Abort(_('patch "%s" already exists') % name)
 
     def checkkeepchanges(self, keepchanges, force):
         if force and keepchanges:
-            raise util.Abort(_('cannot use both --force and --keep-changes'))
+            raise error.Abort(_('cannot use both --force and --keep-changes'))
 
     def new(self, repo, patchfn, *pats, **opts):
         """options:
@@ -1143,7 +1151,7 @@
             # detect missing files in pats
             def badfn(f, msg):
                 if f != '.hgsubstate': # .hgsubstate is auto-created
-                    raise util.Abort('%s: %s' % (f, msg))
+                    raise error.Abort('%s: %s' % (f, msg))
             match = scmutil.match(repo[None], pats, opts, badfn=badfn)
             changes = repo.status(match=match)
         else:
@@ -1153,7 +1161,7 @@
             commitfiles.extend(files)
         match = scmutil.matchfiles(repo, commitfiles)
         if len(repo[None].parents()) > 1:
-            raise util.Abort(_('cannot manage merge changesets'))
+            raise error.Abort(_('cannot manage merge changesets'))
         self.checktoppatch(repo)
         insert = self.fullseriesend()
         wlock = repo.wlock()
@@ -1162,7 +1170,7 @@
                 # if patch file write fails, abort early
                 p = self.opener(patchfn, "w")
             except IOError as e:
-                raise util.Abort(_('cannot write patch "%s": %s')
+                raise error.Abort(_('cannot write patch "%s": %s')
                                  % (patchfn, e.strerror))
             try:
                 defaultmsg = "[mq]: %s" % patchfn
@@ -1185,7 +1193,7 @@
                 n = newcommit(repo, None, commitmsg, user, date, match=match,
                               force=True, editor=editor)
                 if n is None:
-                    raise util.Abort(_("repo commit failed"))
+                    raise error.Abort(_("repo commit failed"))
                 try:
                     self.fullseries[insert:insert] = [patchfn]
                     self.applied.append(statusentry(n, patchfn))
@@ -1302,7 +1310,7 @@
                         else:
                             if i + off < len(self.series):
                                 return self.series[i + off]
-        raise util.Abort(_("patch %s not in series") % patch)
+        raise error.Abort(_("patch %s not in series") % patch)
 
     def push(self, repo, patch=None, force=False, list=False, mergeq=None,
              all=False, move=False, exact=False, nobackup=False,
@@ -1338,7 +1346,7 @@
                 pushable, reason = self.pushable(patch)
                 if pushable:
                     if self.series.index(patch) < self.seriesend():
-                        raise util.Abort(
+                        raise error.Abort(
                             _("cannot push to a previous patch: %s") % patch)
                 else:
                     if reason:
@@ -1367,25 +1375,25 @@
 
             if exact:
                 if keepchanges:
-                    raise util.Abort(
+                    raise error.Abort(
                         _("cannot use --exact and --keep-changes together"))
                 if move:
-                    raise util.Abort(_('cannot use --exact and --move '
+                    raise error.Abort(_('cannot use --exact and --move '
                                        'together'))
                 if self.applied:
-                    raise util.Abort(_('cannot push --exact with applied '
+                    raise error.Abort(_('cannot push --exact with applied '
                                        'patches'))
                 root = self.series[start]
                 target = patchheader(self.join(root), self.plainmode).parent
                 if not target:
-                    raise util.Abort(
+                    raise error.Abort(
                         _("%s does not have a parent recorded") % root)
                 if not repo[target] == repo['.']:
                     hg.update(repo, target)
 
             if move:
                 if not patch:
-                    raise util.Abort(_("please specify the patch to move"))
+                    raise error.Abort(_("please specify the patch to move"))
                 for fullstart, rpn in enumerate(self.fullseries):
                     # strip markers for patch guards
                     if self.guard_re.split(rpn, 1)[0] == self.series[start]:
@@ -1431,9 +1439,9 @@
             except AbortNoCleanup:
                 raise
             except: # re-raises
-                self.ui.warn(_('cleaning up working directory...'))
-                node = repo.dirstate.p1()
-                hg.revert(repo, node, None)
+                self.ui.warn(_('cleaning up working directory...\n'))
+                cmdutil.revert(self.ui, repo, repo['.'],
+                               repo.dirstate.parents(), no_backup=True)
                 # only remove unknown files that we know we touched or
                 # created while patching
                 for f in all_files:
@@ -1446,7 +1454,7 @@
                 return ret[0]
             top = self.applied[-1].name
             if ret[0] and ret[0] > 1:
-                msg = _("errors during apply, please fix and refresh %s\n")
+                msg = _("errors during apply, please fix and qrefresh %s\n")
                 self.ui.write(msg % top)
             else:
                 self.ui.write(_("now at: %s\n") % top)
@@ -1467,7 +1475,7 @@
                     patch = self.lookup(patch)
                 info = self.isapplied(patch)
                 if not info:
-                    raise util.Abort(_("patch %s is not applied") % patch)
+                    raise error.Abort(_("patch %s is not applied") % patch)
 
             if not self.applied:
                 # Allow qpop -a to work repeatedly,
@@ -1520,13 +1528,13 @@
                 heads = repo.changelog.heads(rev)
             except error.LookupError:
                 node = short(rev)
-                raise util.Abort(_('trying to pop unknown node %s') % node)
+                raise error.Abort(_('trying to pop unknown node %s') % node)
 
             if heads != [self.applied[-1].node]:
-                raise util.Abort(_("popping would remove a revision not "
+                raise error.Abort(_("popping would remove a revision not "
                                    "managed by this patch queue"))
             if not repo[self.applied[-1].node].mutable():
-                raise util.Abort(
+                raise error.Abort(
                     _("popping would remove a public revision"),
                     hint=_('see "hg help phases" for details'))
 
@@ -1537,11 +1545,11 @@
                 ctx = repo[qp]
                 m, a, r, d = repo.status(qp, '.')[:4]
                 if d:
-                    raise util.Abort(_("deletions found between repo revs"))
+                    raise error.Abort(_("deletions found between repo revs"))
 
                 tobackup = set(a + m + r) & tobackup
                 if keepchanges and tobackup:
-                    raise util.Abort(_("local changes found, refresh first"))
+                    raise error.Abort(_("local changes found, qrefresh first"))
                 self.backup(repo, tobackup)
                 repo.dirstate.beginparentchange()
                 for f in a:
@@ -1596,9 +1604,9 @@
             self.checktoppatch(repo)
             (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
             if repo.changelog.heads(top) != [top]:
-                raise util.Abort(_("cannot refresh a revision with children"))
+                raise error.Abort(_("cannot qrefresh a revision with children"))
             if not repo[top].mutable():
-                raise util.Abort(_("cannot refresh public revision"),
+                raise error.Abort(_("cannot qrefresh public revision"),
                                  hint=_('see "hg help phases" for details'))
 
             cparents = repo.changelog.parents(top)
@@ -1806,7 +1814,7 @@
                 ctx = repo[cparents[0]]
                 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
                 self.savedirty()
-                self.ui.warn(_('refresh interrupted while patch was popped! '
+                self.ui.warn(_('qrefresh interrupted while patch was popped! '
                                '(revert --all, qpush to recover)\n'))
                 raise
         finally:
@@ -1815,7 +1823,7 @@
 
     def init(self, repo, create=False):
         if not create and os.path.isdir(self.path):
-            raise util.Abort(_("patch queue directory already exists"))
+            raise error.Abort(_("patch queue directory already exists"))
         try:
             os.mkdir(self.path)
         except OSError as inst:
@@ -1826,7 +1834,7 @@
 
     def unapplied(self, repo, patch=None):
         if patch and patch not in self.series:
-            raise util.Abort(_("patch %s is not in series file") % patch)
+            raise error.Abort(_("patch %s is not in series file") % patch)
         if not patch:
             start = self.seriesend()
         else:
@@ -2034,19 +2042,19 @@
                 force=None, git=False):
         def checkseries(patchname):
             if patchname in self.series:
-                raise util.Abort(_('patch %s is already in the series file')
+                raise error.Abort(_('patch %s is already in the series file')
                                  % patchname)
 
         if rev:
             if files:
-                raise util.Abort(_('option "-r" not valid when importing '
+                raise error.Abort(_('option "-r" not valid when importing '
                                    'files'))
             rev = scmutil.revrange(repo, rev)
             rev.sort(reverse=True)
         elif not files:
-            raise util.Abort(_('no files or revisions specified'))
+            raise error.Abort(_('no files or revisions specified'))
         if (len(files) > 1 or len(rev) > 1) and patchname:
-            raise util.Abort(_('option "-n" not valid when importing multiple '
+            raise error.Abort(_('option "-n" not valid when importing multiple '
                                'patches'))
         imported = []
         if rev:
@@ -2055,21 +2063,21 @@
             # Otherwise, they should form a linear path to a head.
             heads = repo.changelog.heads(repo.changelog.node(rev.first()))
             if len(heads) > 1:
-                raise util.Abort(_('revision %d is the root of more than one '
+                raise error.Abort(_('revision %d is the root of more than one '
                                    'branch') % rev.last())
             if self.applied:
                 base = repo.changelog.node(rev.first())
                 if base in [n.node for n in self.applied]:
-                    raise util.Abort(_('revision %d is already managed')
+                    raise error.Abort(_('revision %d is already managed')
                                      % rev.first())
                 if heads != [self.applied[-1].node]:
-                    raise util.Abort(_('revision %d is not the parent of '
+                    raise error.Abort(_('revision %d is not the parent of '
                                        'the queue') % rev.first())
                 base = repo.changelog.rev(self.applied[0].node)
                 lastparent = repo.changelog.parentrevs(base)[0]
             else:
                 if heads != [repo.changelog.node(rev.first())]:
-                    raise util.Abort(_('revision %d has unmanaged children')
+                    raise error.Abort(_('revision %d has unmanaged children')
                                      % rev.first())
                 lastparent = None
 
@@ -2078,22 +2086,23 @@
             try:
                 for r in rev:
                     if not repo[r].mutable():
-                        raise util.Abort(_('revision %d is not mutable') % r,
+                        raise error.Abort(_('revision %d is not mutable') % r,
                                          hint=_('see "hg help phases" '
                                                 'for details'))
                     p1, p2 = repo.changelog.parentrevs(r)
                     n = repo.changelog.node(r)
                     if p2 != nullrev:
-                        raise util.Abort(_('cannot import merge revision %d')
+                        raise error.Abort(_('cannot import merge revision %d')
                                          % r)
                     if lastparent and lastparent != r:
-                        raise util.Abort(_('revision %d is not the parent of '
+                        raise error.Abort(_('revision %d is not the parent of '
                                            '%d')
                                          % (r, lastparent))
                     lastparent = p1
 
                     if not patchname:
-                        patchname = normname('%d.diff' % r)
+                        patchname = makepatchname(self.fullseries,
+                            repo[r].description().split('\n', 1)[0])
                     checkseries(patchname)
                     self.checkpatchname(patchname, force)
                     self.fullseries.insert(0, patchname)
@@ -2121,13 +2130,14 @@
         for i, filename in enumerate(files):
             if existing:
                 if filename == '-':
-                    raise util.Abort(_('-e is incompatible with import from -'))
+                    raise error.Abort(_('-e is incompatible with import from -')
+                                     )
                 filename = normname(filename)
                 self.checkreservedname(filename)
                 if util.url(filename).islocal():
                     originpath = self.join(filename)
                     if not os.path.isfile(originpath):
-                        raise util.Abort(
+                        raise error.Abort(
                             _("patch %s does not exist") % filename)
 
                 if patchname:
@@ -2141,7 +2151,7 @@
 
             else:
                 if filename == '-' and not patchname:
-                    raise util.Abort(_('need --name to import a patch from -'))
+                    raise error.Abort(_('need --name to import a patch from -'))
                 elif not patchname:
                     patchname = normname(os.path.basename(filename.rstrip('/')))
                 self.checkpatchname(patchname, force)
@@ -2153,7 +2163,7 @@
                         text = fp.read()
                         fp.close()
                 except (OSError, IOError):
-                    raise util.Abort(_("unable to read file %s") % filename)
+                    raise error.Abort(_("unable to read file %s") % filename)
                 patchf = self.opener(patchname, "w")
                 patchf.write(text)
                 patchf.close()
@@ -2212,7 +2222,7 @@
 
     if patch:
         if patch not in q.series:
-            raise util.Abort(_("patch %s is not in series file") % patch)
+            raise error.Abort(_("patch %s is not in series file") % patch)
         end = q.series.index(patch) + 1
     else:
         end = q.seriesend(True)
@@ -2244,7 +2254,7 @@
     q = repo.mq
     if patch:
         if patch not in q.series:
-            raise util.Abort(_("patch %s is not in series file") % patch)
+            raise error.Abort(_("patch %s is not in series file") % patch)
         start = q.series.index(patch) + 1
     else:
         start = q.seriesend(True)
@@ -2411,7 +2421,7 @@
     try:
         hg.peer(ui, opts, patchespath)
     except error.RepoError:
-        raise util.Abort(_('versioned patch repository not found'
+        raise error.Abort(_('versioned patch repository not found'
                            ' (see init --mq)'))
     qbase, destrev = None, None
     if sr.local():
@@ -2461,7 +2471,7 @@
     q = repo.mq
     r = q.qrepo()
     if not r:
-        raise util.Abort('no queue repository')
+        raise error.Abort('no queue repository')
     commands.commit(r.ui, r, *pats, **opts)
 
 @command("qseries",
@@ -2666,9 +2676,9 @@
     Returns 0 on success."""
     q = repo.mq
     if not files:
-        raise util.Abort(_('qfold requires at least one patch name'))
+        raise error.Abort(_('qfold requires at least one patch name'))
     if not q.checktoppatch(repo)[0]:
-        raise util.Abort(_('no patches applied'))
+        raise error.Abort(_('no patches applied'))
     q.checklocalchanges(repo)
 
     message = cmdutil.logmessage(ui, opts)
@@ -2681,7 +2691,7 @@
         if p in patches or p == parent:
             ui.warn(_('skipping already folded patch %s\n') % p)
         if q.isapplied(p):
-            raise util.Abort(_('qfold cannot fold already applied patch %s')
+            raise error.Abort(_('qfold cannot fold already applied patch %s')
                              % p)
         patches.append(p)
 
@@ -2693,7 +2703,7 @@
         pf = q.join(p)
         (patchsuccess, files, fuzz) = q.patch(repo, pf)
         if not patchsuccess:
-            raise util.Abort(_('error folding patch %s') % p)
+            raise error.Abort(_('error folding patch %s') % p)
 
     if not message:
         ph = patchheader(q.join(parent), q.plainmode)
@@ -2792,23 +2802,23 @@
     args = list(args)
     if opts.get('list'):
         if args or opts.get('none'):
-            raise util.Abort(_('cannot mix -l/--list with options or '
+            raise error.Abort(_('cannot mix -l/--list with options or '
                                'arguments'))
         for i in xrange(len(q.series)):
             status(i)
         return
     if not args or args[0][0:1] in '-+':
         if not q.applied:
-            raise util.Abort(_('no patches applied'))
+            raise error.Abort(_('no patches applied'))
         patch = q.applied[-1].name
     if patch is None and args[0][0:1] not in '-+':
         patch = args.pop(0)
     if patch is None:
-        raise util.Abort(_('no patch to work with'))
+        raise error.Abort(_('no patch to work with'))
     if args or opts.get('none'):
         idx = q.findseries(patch)
         if idx is None:
-            raise util.Abort(_('no patch named %s') % patch)
+            raise error.Abort(_('no patch named %s') % patch)
         q.setguards(idx, args)
         q.savedirty()
     else:
@@ -3034,10 +3044,10 @@
             newpath = os.path.join(q.basepath, opts.get('name'))
             if os.path.exists(newpath):
                 if not os.path.isdir(newpath):
-                    raise util.Abort(_('destination %s exists and is not '
+                    raise error.Abort(_('destination %s exists and is not '
                                        'a directory') % newpath)
                 if not opts.get('force'):
-                    raise util.Abort(_('destination %s exists, '
+                    raise error.Abort(_('destination %s exists, '
                                        'use -f to force') % newpath)
         else:
             newpath = savename(path)
@@ -3181,7 +3191,7 @@
     Returns 0 on success.
     """
     if not opts.get('applied') and not revrange:
-        raise util.Abort(_('no revisions specified'))
+        raise error.Abort(_('no revisions specified'))
     elif opts.get('applied'):
         revrange = ('qbase::qtip',) + revrange
 
@@ -3270,7 +3280,7 @@
 
     def _setactive(name):
         if q.applied:
-            raise util.Abort(_('new queue created, but cannot make active '
+            raise error.Abort(_('new queue created, but cannot make active '
                                'as patches are applied'))
         _setactivenocheck(name)
 
@@ -3299,12 +3309,12 @@
 
     def _delete(name):
         if name not in existing:
-            raise util.Abort(_('cannot delete queue that does not exist'))
+            raise error.Abort(_('cannot delete queue that does not exist'))
 
         current = _getcurrent()
 
         if name == current:
-            raise util.Abort(_('cannot delete currently active queue'))
+            raise error.Abort(_('cannot delete currently active queue'))
 
         fh = repo.vfs('patches.queues.new', 'w')
         for queue in existing:
@@ -3328,14 +3338,14 @@
         return
 
     if not _validname(name):
-        raise util.Abort(
+        raise error.Abort(
                 _('invalid queue name, may not contain the characters ":\\/."'))
 
     existing = _getqueues()
 
     if opts.get('create'):
         if name in existing:
-            raise util.Abort(_('queue "%s" already exists') % name)
+            raise error.Abort(_('queue "%s" already exists') % name)
         if _noqueues():
             _addqueue(_defaultqueue)
         _addqueue(name)
@@ -3343,15 +3353,16 @@
     elif opts.get('rename'):
         current = _getcurrent()
         if name == current:
-            raise util.Abort(_('can\'t rename "%s" to its current name') % name)
+            raise error.Abort(_('can\'t rename "%s" to its current name')
+                              % name)
         if name in existing:
-            raise util.Abort(_('queue "%s" already exists') % name)
+            raise error.Abort(_('queue "%s" already exists') % name)
 
         olddir = _queuedir(current)
         newdir = _queuedir(name)
 
         if os.path.exists(newdir):
-            raise util.Abort(_('non-queue directory "%s" already exists') %
+            raise error.Abort(_('non-queue directory "%s" already exists') %
                     newdir)
 
         fh = repo.vfs('patches.queues.new', 'w')
@@ -3375,7 +3386,7 @@
             shutil.rmtree(qdir)
     else:
         if name not in existing:
-            raise util.Abort(_('use --create to create a new queue'))
+            raise error.Abort(_('use --create to create a new queue'))
         _setactive(name)
 
 def mqphasedefaults(repo, roots):
@@ -3406,7 +3417,7 @@
                 parents = self.dirstate.parents()
                 patches = [s.node for s in self.mq.applied]
                 if parents[0] in patches or parents[1] in patches:
-                    raise util.Abort(errmsg)
+                    raise error.Abort(errmsg)
 
         def commit(self, text="", user=None, date=None, match=None,
                    force=False, editor=False, extra={}):
@@ -3433,7 +3444,7 @@
                 # looking for pushed and shared changeset
                 for node in outapplied:
                     if self[node].phase() < phases.secret:
-                        raise util.Abort(_('source has mq patches applied'))
+                        raise error.Abort(_('source has mq patches applied'))
                 # no non-secret patches pushed
             super(mqrepo, self).checkpush(pushop)
 
@@ -3495,12 +3506,12 @@
     if args:
         repopath = args[0]
         if not hg.islocal(repopath):
-            raise util.Abort(_('only a local queue repository '
+            raise error.Abort(_('only a local queue repository '
                                'may be initialized'))
     else:
         repopath = cmdutil.findrepo(os.getcwd())
         if not repopath:
-            raise util.Abort(_('there is no Mercurial repository here '
+            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
     repo = hg.repository(ui, repopath)
     return qinit(ui, repo, True)
@@ -3517,7 +3528,7 @@
     q = repo.mq
     r = q.qrepo()
     if not r:
-        raise util.Abort(_('no queue repository'))
+        raise error.Abort(_('no queue repository'))
     return orig(r.ui, r, *args, **kwargs)
 
 def summaryhook(ui, repo):
--- a/hgext/notify.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/notify.py	Tue Oct 20 15:59:10 2015 -0500
@@ -135,7 +135,7 @@
 
 import email, socket, time
 from mercurial.i18n import _
-from mercurial import patch, cmdutil, util, mail
+from mercurial import patch, cmdutil, util, mail, error
 import fnmatch
 
 # Note for extension authors: ONLY specify testedwith = 'internal' for
@@ -277,7 +277,7 @@
         try:
             msg = p.parsestr(data)
         except email.Errors.MessageParseError as inst:
-            raise util.Abort(inst)
+            raise error.Abort(inst)
 
         # store sender and subject
         sender, subject = msg['From'], msg['Subject']
--- a/hgext/pager.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/pager.py	Tue Oct 20 15:59:10 2015 -0500
@@ -65,13 +65,19 @@
 # leave the attribute unspecified.
 testedwith = 'internal'
 
-def _pagersubprocess(ui, p):
+def _runpager(ui, p):
     pager = subprocess.Popen(p, shell=True, bufsize=-1,
                              close_fds=util.closefds, stdin=subprocess.PIPE,
                              stdout=sys.stdout, stderr=sys.stderr)
 
-    stdout = os.dup(sys.stdout.fileno())
-    stderr = os.dup(sys.stderr.fileno())
+    # back up original file objects and descriptors
+    olduifout = ui.fout
+    oldstdout = sys.stdout
+    stdoutfd = os.dup(sys.stdout.fileno())
+    stderrfd = os.dup(sys.stderr.fileno())
+
+    # create new line-buffered stdout so that output can show up immediately
+    ui.fout = sys.stdout = newstdout = os.fdopen(sys.stdout.fileno(), 'wb', 1)
     os.dup2(pager.stdin.fileno(), sys.stdout.fileno())
     if ui._isatty(sys.stderr):
         os.dup2(pager.stdin.fileno(), sys.stderr.fileno())
@@ -81,13 +87,16 @@
         if util.safehasattr(signal, "SIGINT"):
             signal.signal(signal.SIGINT, signal.SIG_IGN)
         pager.stdin.close()
-        os.dup2(stdout, sys.stdout.fileno())
-        os.dup2(stderr, sys.stderr.fileno())
+        ui.fout = olduifout
+        sys.stdout = oldstdout
+        # close new stdout while it's associated with pager; otherwise stdout
+        # fd would be closed when newstdout is deleted
+        newstdout.close()
+        # restore original fds: stdout is open again
+        os.dup2(stdoutfd, sys.stdout.fileno())
+        os.dup2(stderrfd, sys.stderr.fileno())
         pager.wait()
 
-def _runpager(ui, p):
-    _pagersubprocess(ui, p)
-
 def uisetup(ui):
     if '--debugger' in sys.argv or not ui.formatted():
         return
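
The pager hunks above replace the plain fd save/restore with a version that also swaps ui.fout and installs a line-buffered stdout so output reaches the pager immediately. The underlying mechanism is still os.dup/os.dup2; a minimal standalone sketch of that mechanism (not the extension code itself, and assuming a 'more' pager is on PATH):

    import os, subprocess, sys

    pager = subprocess.Popen('more', shell=True, stdin=subprocess.PIPE,
                             stdout=sys.stdout, stderr=sys.stderr)
    savedfd = os.dup(sys.stdout.fileno())                # remember real stdout
    os.dup2(pager.stdin.fileno(), sys.stdout.fileno())   # fd 1 now feeds pager
    try:
        sys.stdout.write('this line goes through the pager\n')
        sys.stdout.flush()
    finally:
        pager.stdin.close()
        os.dup2(savedfd, sys.stdout.fileno())            # put the terminal back
        os.close(savedfd)
        pager.wait()
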
--- a/hgext/patchbomb.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/patchbomb.py	Tue Oct 20 15:59:10 2015 -0500
@@ -60,7 +60,7 @@
 import os, errno, socket, tempfile, cStringIO
 import email
 
-from mercurial import cmdutil, commands, hg, mail, patch, util
+from mercurial import cmdutil, commands, hg, mail, patch, util, error
 from mercurial import scmutil
 from mercurial.i18n import _
 from mercurial.node import bin
@@ -73,6 +73,24 @@
 # leave the attribute unspecified.
 testedwith = 'internal'
 
+def _addpullheader(seq, ctx):
+    """Add a header pointing to a public URL where the changeset is available
+    """
+    repo = ctx.repo()
+    # experimental config: patchbomb.publicurl
+    # waiting for some logic that check that the changeset are available on the
+    # destination before patchbombing anything.
+    pullurl = repo.ui.config('patchbomb', 'publicurl')
+    if pullurl is not None:
+        return ('Available At %s\n'
+                '#              hg pull %s -r %s' % (pullurl, pullurl, ctx))
+    return None
+
+def uisetup(ui):
+    cmdutil.extraexport.append('pullurl')
+    cmdutil.extraexportmap['pullurl'] = _addpullheader
+
+
 def prompt(ui, prompt, default=None, rest=':'):
     if default:
         prompt += ' [%s]' % default
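
_addpullheader and the uisetup hook above make each exported patch carry an 'Available At' header when patchbomb.publicurl is set, and the email command later in this diff refuses to send changesets that the public repository does not already have (when the peer can be checked). A hypothetical configuration; the URL is an example, not a real repository:

    [patchbomb]
    publicurl = https://hg.example.org/project

With that in place, hg email -r REV aborts with a 'use "hg push ..."' hint if the selected revisions are missing from the public repository.
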
@@ -205,6 +223,9 @@
     ui = repo.ui
     tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
     tmpfn = os.path.join(tmpdir, 'bundle')
+    btype = ui.config('patchbomb', 'bundletype')
+    if btype:
+        opts['type'] = btype
     try:
         commands.bundle(ui, repo, tmpfn, dest, **opts)
         fp = open(tmpfn, 'rb')
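
The hunk above lets patchbomb pass a configured bundle type through to the bundle command when sending with --bundle; the docstring hunk just below documents it. A hypothetical setting (any value accepted by hg bundle --type should work, e.g. gzip):

    [patchbomb]
    bundletype = gzip
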
@@ -416,7 +437,8 @@
 
     With -b/--bundle, changesets are selected as for --outgoing, but a
     single email containing a binary Mercurial bundle as an attachment
-    will be sent.
+    will be sent. Use the ``patchbomb.bundletype`` config option to
+    control the bundle type as with :hg:`bundle --type`.
 
     With -m/--mbox, instead of previewing each patchbomb message in a
     pager or sending the messages directly, it will create a UNIX
@@ -479,15 +501,15 @@
         mail.validateconfig(ui)
 
     if not (revs or rev or outgoing or bundle or patches):
-        raise util.Abort(_('specify at least one changeset with -r or -o'))
+        raise error.Abort(_('specify at least one changeset with -r or -o'))
 
     if outgoing and bundle:
-        raise util.Abort(_("--outgoing mode always on with --bundle;"
+        raise error.Abort(_("--outgoing mode always on with --bundle;"
                            " do not re-specify --outgoing"))
 
     if outgoing or bundle:
         if len(revs) > 1:
-            raise util.Abort(_("too many destinations"))
+            raise error.Abort(_("too many destinations"))
         if revs:
             dest = revs[0]
         else:
@@ -496,7 +518,7 @@
 
     if rev:
         if revs:
-            raise util.Abort(_('use only one form to specify the revision'))
+            raise error.Abort(_('use only one form to specify the revision'))
         revs = rev
 
     revs = scmutil.revrange(repo, revs)
@@ -505,6 +527,37 @@
     if bundle:
         opts['revs'] = [str(r) for r in revs]
 
+    # check if revision exist on the public destination
+    publicurl = repo.ui.config('patchbomb', 'publicurl')
+    if publicurl is not None:
+        repo.ui.debug('checking that revision exist in the public repo')
+        try:
+            publicpeer = hg.peer(repo, {}, publicurl)
+        except error.RepoError:
+            repo.ui.write_err(_('unable to access public repo: %s\n')
+                              % publicurl)
+            raise
+        if not publicpeer.capable('known'):
+            repo.ui.debug('skipping existence checks: public repo too old')
+        else:
+            out = [repo[r] for r in revs]
+            known = publicpeer.known(h.node() for h in out)
+            missing = []
+            for idx, h in enumerate(out):
+                if not known[idx]:
+                    missing.append(h)
+            if missing:
+                if 1 < len(missing):
+                    msg = _('public "%s" is missing %s and %i others')
+                    msg %= (publicurl, missing[0], len(missing) - 1)
+                else:
+                    msg = _('public url %s is missing %s')
+                    msg %= (publicurl, missing[0])
+                revhint = ''.join('-r %s' % h
+                                  for h in repo.set('heads(%ld)', missing))
+                hint = _('use "hg push %s %s"') % (publicurl, revhint)
+                raise error.Abort(msg, hint=hint)
+
     # start
     if date:
         start_time = util.parsedate(date)
@@ -556,7 +609,7 @@
     to = getaddrs('To', ask=True)
     if not to:
         # we can get here in non-interactive mode
-        raise util.Abort(_('no recipient addresses provided'))
+        raise error.Abort(_('no recipient addresses provided'))
     cc = getaddrs('Cc', ask=True, default='') or []
     bcc = getaddrs('Bcc') or []
     replyto = getaddrs('Reply-To')
@@ -576,7 +629,7 @@
         ui.write('\n')
         if ui.promptchoice(_('are you sure you want to send (yn)?'
                              '$$ &Yes $$ &No')):
-            raise util.Abort(_('patchbomb canceled'))
+            raise error.Abort(_('patchbomb canceled'))
 
     ui.write('\n')
 
--- a/hgext/progress.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/progress.py	Tue Oct 20 15:59:10 2015 -0500
@@ -10,3 +10,8 @@
 This extension has been merged into core, you can remove it from your config.
 See hg help config.progress for configuration options.
 """
+# Note for extension authors: ONLY specify testedwith = 'internal' for
+# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
+# be specifying the version(s) of Mercurial they are tested with, or
+# leave the attribute unspecified.
+testedwith = 'internal'
--- a/hgext/purge.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/purge.py	Tue Oct 20 15:59:10 2015 -0500
@@ -1,6 +1,6 @@
 # Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
 #
-# This is a small extension for Mercurial (http://mercurial.selenic.com/)
+# This is a small extension for Mercurial (https://mercurial-scm.org/)
 # that removes files not known to mercurial
 #
 # This program was inspired by the "cvspurge" script contained in CVS
@@ -24,7 +24,7 @@
 
 '''command to delete untracked files from the working directory'''
 
-from mercurial import util, commands, cmdutil, scmutil
+from mercurial import util, commands, cmdutil, scmutil, error
 from mercurial.i18n import _
 import os
 
@@ -94,7 +94,7 @@
             except OSError:
                 m = _('%s cannot be removed') % name
                 if opts['abort_on_err']:
-                    raise util.Abort(m)
+                    raise error.Abort(m)
                 ui.warn(_('warning: %s\n') % m)
         else:
             ui.write('%s%s' % (name, eol))
--- a/hgext/rebase.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/rebase.py	Tue Oct 20 15:59:10 2015 -0500
@@ -11,21 +11,27 @@
 repository.
 
 For more information:
-http://mercurial.selenic.com/wiki/RebaseExtension
+https://mercurial-scm.org/wiki/RebaseExtension
 '''
 
 from mercurial import hg, util, repair, merge, cmdutil, commands, bookmarks
 from mercurial import extensions, patch, scmutil, phases, obsolete, error
-from mercurial import copies, repoview
+from mercurial import copies, repoview, revset
 from mercurial.commands import templateopts
 from mercurial.node import nullrev, nullid, hex, short
 from mercurial.lock import release
 from mercurial.i18n import _
 import os, errno
 
+# The following constants are used throughout the rebase module. The ordering of
+# their values must be maintained.
+
+# Indicates that a revision needs to be rebased
 revtodo = -1
 nullmerge = -2
 revignored = -3
+# To do with obsolescence
+revprecursor = -4
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
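
The comments added above name the sentinel values used in the rebase state mapping (old revision -> new revision or marker), which the main loop later in this diff matches against one by one. A purely illustrative sketch of such a mapping, with the sentinel values restated from the hunk above and made-up revision numbers:

    revtodo, nullmerge, revignored, revprecursor = -1, -2, -3, -4

    # keys are old revision numbers; values are either the rebased revision
    # or one of the markers above
    state = {
        12: 27,            # already rebased as revision 27
        13: revtodo,       # still needs to be rebased
        14: nullmerge,     # skipped as a null merge
        15: revignored,    # not rebased, ignored
        16: revprecursor,  # obsolete, successor already in the destination
    }
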
@@ -35,6 +41,9 @@
 # leave the attribute unspecified.
 testedwith = 'internal'
 
+def _nothingtorebase():
+    return 1
+
 def _savegraft(ctx, extra):
     s = ctx.extra().get('source', None)
     if s is not None:
@@ -54,6 +63,24 @@
             c(ctx, extra)
     return extrafn
 
+def _destrebase(repo):
+    # Destination defaults to the latest revision in the
+    # current branch
+    branch = repo[None].branch()
+    return repo[branch].rev()
+
+def _revsetdestrebase(repo, subset, x):
+    # ``_rebasedefaultdest()``
+
+    # default destination for rebase.
+    # # XXX: Currently private because I expect the signature to change.
+    # # XXX: - taking rev as arguments,
+    # # XXX: - bailing out in case of ambiguity vs returning all data.
+    # # XXX: - probably merging with the merge destination.
+    # i18n: "_rebasedefaultdest" is a keyword
+    revset.getargs(x, 0, 0, _("_rebasedefaultdest takes no arguments"))
+    return subset & revset.baseset([_destrebase(repo)])
+
 @command('rebase',
     [('s', 'source', '',
      _('rebase the specified changeset and descendants'), _('REV')),
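
_destrebase and the private _rebasedefaultdest revset above compute the default rebase destination: the latest revision on the working directory's branch. A small sketch of the same lookup outside the extension, assuming a local repository in the current directory and the Mercurial 3.6-era API:

    from mercurial import hg, ui as uimod

    repo = hg.repository(uimod.ui(), '.')
    branch = repo[None].branch()      # branch of the working directory
    dest = repo[branch].rev()         # latest revision on that branch,
                                      # i.e. rebase's default destination
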
@@ -171,6 +198,9 @@
     originalwd = target = None
     activebookmark = None
     external = nullrev
+    # Mapping between the old revision id and either what is the new rebased
+    # revision or what needs to be done with the old revision. The state dict
+    # will be what contains most of the rebase progress state.
     state = {}
     skipped = set()
     targetancestors = set()
@@ -201,24 +231,30 @@
         keepopen = opts.get('keepopen', False)
 
         if opts.get('interactive'):
+            try:
+                if extensions.find('histedit'):
+                    enablehistedit = ''
+            except KeyError:
+                enablehistedit = " --config extensions.histedit="
+            help = "hg%s help -e histedit" % enablehistedit
             msg = _("interactive history editing is supported by the "
-                    "'histedit' extension (see \"hg help histedit\")")
-            raise util.Abort(msg)
+                    "'histedit' extension (see \"%s\")") % help
+            raise error.Abort(msg)
 
         if collapsemsg and not collapsef:
-            raise util.Abort(
+            raise error.Abort(
                 _('message can only be specified with collapse'))
 
         if contf or abortf:
             if contf and abortf:
-                raise util.Abort(_('cannot use both abort and continue'))
+                raise error.Abort(_('cannot use both abort and continue'))
             if collapsef:
-                raise util.Abort(
+                raise error.Abort(
                     _('cannot use collapse with continue or abort'))
             if srcf or basef or destf:
-                raise util.Abort(
+                raise error.Abort(
                     _('abort and continue do not allow specifying revisions'))
-            if opts.get('tool', False):
+            if abortf and opts.get('tool', False):
                 ui.warn(_('tool option will be ignored\n'))
 
             try:
@@ -233,44 +269,42 @@
                 else:
                     msg = _('cannot continue inconsistent rebase')
                     hint = _('use "hg rebase --abort" to clear broken state')
-                    raise util.Abort(msg, hint=hint)
+                    raise error.Abort(msg, hint=hint)
             if abortf:
                 return abort(repo, originalwd, target, state,
                              activebookmark=activebookmark)
         else:
             if srcf and basef:
-                raise util.Abort(_('cannot specify both a '
+                raise error.Abort(_('cannot specify both a '
                                    'source and a base'))
             if revf and basef:
-                raise util.Abort(_('cannot specify both a '
+                raise error.Abort(_('cannot specify both a '
                                    'revision and a base'))
             if revf and srcf:
-                raise util.Abort(_('cannot specify both a '
+                raise error.Abort(_('cannot specify both a '
                                    'revision and a source'))
 
             cmdutil.checkunfinished(repo)
             cmdutil.bailifchanged(repo)
 
-            if not destf:
-                # Destination defaults to the latest revision in the
-                # current branch
-                branch = repo[None].branch()
-                dest = repo[branch]
+            if destf:
+                dest = scmutil.revsingle(repo, destf)
             else:
-                dest = scmutil.revsingle(repo, destf)
+                dest = repo[_destrebase(repo)]
+                destf = str(dest)
 
             if revf:
                 rebaseset = scmutil.revrange(repo, revf)
                 if not rebaseset:
                     ui.status(_('empty "rev" revision set - '
                                 'nothing to rebase\n'))
-                    return 1
+                    return _nothingtorebase()
             elif srcf:
                 src = scmutil.revrange(repo, [srcf])
                 if not src:
                     ui.status(_('empty "source" revision set - '
                                 'nothing to rebase\n'))
-                    return 1
+                    return _nothingtorebase()
                 rebaseset = repo.revs('(%ld)::', src)
                 assert rebaseset
             else:
@@ -278,7 +312,7 @@
                 if not base:
                     ui.status(_('empty "base" revision set - '
                                 "can't compute rebase set\n"))
-                    return 1
+                    return _nothingtorebase()
                 commonanc = repo.revs('ancestor(%ld, %d)', base, dest).first()
                 if commonanc is not None:
                     rebaseset = repo.revs('(%d::(%ld) - %d)::',
@@ -311,26 +345,39 @@
                     else: # can it happen?
                         ui.status(_('nothing to rebase from %s to %s\n') %
                                   ('+'.join(str(repo[r]) for r in base), dest))
-                    return 1
+                    return _nothingtorebase()
 
             allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
             if (not (keepf or allowunstable)
                   and repo.revs('first(children(%ld) - %ld)',
                                 rebaseset, rebaseset)):
-                raise util.Abort(
+                raise error.Abort(
                     _("can't remove original changesets with"
                       " unrebased descendants"),
                     hint=_('use --keep to keep original changesets'))
 
-            result = buildstate(repo, dest, rebaseset, collapsef)
+            obsoletenotrebased = {}
+            if ui.configbool('experimental', 'rebaseskipobsolete'):
+                rebasesetrevs = set(rebaseset)
+                obsoletenotrebased = _computeobsoletenotrebased(repo,
+                                                                rebasesetrevs,
+                                                                dest)
+
+                # - plain prune (no successor) changesets are rebased
+                # - split changesets are not rebased if at least one of the
+                # changeset resulting from the split is an ancestor of dest
+                rebaseset = rebasesetrevs - set(obsoletenotrebased)
+            result = buildstate(repo, dest, rebaseset, collapsef,
+                                obsoletenotrebased)
+
             if not result:
                 # Empty state built, nothing to rebase
                 ui.status(_('nothing to rebase\n'))
-                return 1
+                return _nothingtorebase()
 
             root = min(rebaseset)
             if not keepf and not repo[root].mutable():
-                raise util.Abort(_("can't rebase public changeset %s")
+                raise error.Abort(_("can't rebase public changeset %s")
                                  % repo[root],
                                  hint=_('see "hg help phases" for details'))
 
@@ -353,7 +400,7 @@
                 for rev in state:
                     branches.add(repo[rev].branch())
                     if len(branches) > 1:
-                        raise util.Abort(_('cannot collapse multiple named '
+                        raise error.Abort(_('cannot collapse multiple named '
                             'branches'))
 
         # Rebase
@@ -406,7 +453,8 @@
                     editform = cmdutil.mergeeditform(merging, 'rebase')
                     editor = cmdutil.getcommiteditor(editform=editform, **opts)
                     newnode = concludenode(repo, rev, p1, p2, extrafn=extrafn,
-                                           editor=editor)
+                                           editor=editor,
+                                           keepbranches=keepbranchesf)
                 else:
                     # Skip commit if we are collapsing
                     repo.dirstate.beginparentchange()
@@ -418,9 +466,9 @@
                     state[rev] = repo[newnode].rev()
                     ui.debug('rebased as %s\n' % short(newnode))
                 else:
-                    ui.warn(_('note: rebase of %d:%s created no changes '
-                              'to commit\n') % (rev, ctx))
                     if not collapsef:
+                        ui.warn(_('note: rebase of %d:%s created no changes '
+                                  'to commit\n') % (rev, ctx))
                         skipped.add(rev)
                     state[rev] = p1
                     ui.debug('next revision set to %s\n' % p1)
@@ -428,6 +476,12 @@
                 ui.debug('ignoring null merge rebase of %s\n' % rev)
             elif state[rev] == revignored:
                 ui.status(_('not rebasing ignored %s\n') % desc)
+            elif state[rev] == revprecursor:
+                targetctx = repo[obsoletenotrebased[rev]]
+                desctarget = '%d:%s "%s"' % (targetctx.rev(), targetctx,
+                             targetctx.description().split('\n', 1)[0])
+                msg = _('note: not rebasing %s, already in destination as %s\n')
+                ui.status(msg % (desc, desctarget))
             else:
                 ui.status(_('already rebased %s as %s\n') %
                           (desc, repo[state[rev]]))
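
The revprecursor handling above is only reachable when the experimental.rebaseskipobsolete switch introduced earlier in this file's diff is enabled; with it, changesets whose successors are already in the destination are skipped with the 'already in destination' note instead of being rebased again. Enabling it is a single config entry (experimental, and off by default since configbool falls back to False):

    [experimental]
    rebaseskipobsolete = true
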
@@ -450,7 +504,8 @@
                 editopt = True
             editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
             newnode = concludenode(repo, rev, p1, external, commitmsg=commitmsg,
-                                   extrafn=extrafn, editor=editor)
+                                   extrafn=extrafn, editor=editor,
+                                   keepbranches=keepbranchesf)
             if newnode is None:
                 newrev = target
             else:
@@ -525,12 +580,13 @@
         return nullrev
     if len(parents) == 1:
         return parents.pop()
-    raise util.Abort(_('unable to collapse on top of %s, there is more '
+    raise error.Abort(_('unable to collapse on top of %s, there is more '
                        'than one external parent: %s') %
                      (max(targetancestors),
                       ', '.join(str(p) for p in sorted(parents))))
 
-def concludenode(repo, rev, p1, p2, commitmsg=None, editor=None, extrafn=None):
+def concludenode(repo, rev, p1, p2, commitmsg=None, editor=None, extrafn=None,
+                 keepbranches=False):
     '''Commit the wd changes with parents p1 and p2. Reuse commit info from rev
     but also store useful information in extra.
     Return node of committed revision.'''
@@ -540,6 +596,7 @@
         ctx = repo[rev]
         if commitmsg is None:
             commitmsg = ctx.description()
+        keepbranch = keepbranches and repo[p1].branch() != ctx.branch()
         extra = {'rebase_source': ctx.hex()}
         if extrafn:
             extrafn(ctx, extra)
@@ -548,6 +605,8 @@
         try:
             targetphase = max(ctx.phase(), phases.draft)
             repo.ui.setconfig('phases', 'new-commit', targetphase, 'rebase')
+            if keepbranch:
+                repo.ui.setconfig('ui', 'allowemptycommit', True)
             # Commit might fail if unresolved files exist
             newnode = repo.commit(text=commitmsg, user=ctx.user(),
                                   date=ctx.date(), extra=extra, editor=editor)
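
The keepbranch handling added in the two hunks above has a subtle point: when the only thing a rebased changeset carries is a branch-name change, the resulting commit would normally be discarded as empty, so 'ui.allowemptycommit' is enabled for that one commit. A minimal sketch of the decision (plain Python, names illustrative only):

    # Sketch: when does rebase need to force an otherwise-empty commit?
    def shouldforcecommit(keepbranches, parentbranch, ctxbranch):
        # mirrors: keepbranch = keepbranches and repo[p1].branch() != ctx.branch()
        return keepbranches and parentbranch != ctxbranch

    assert shouldforcecommit(True, 'default', 'stable')       # preserve the branch move
    assert not shouldforcecommit(False, 'default', 'stable')  # --keepbranches not given
    assert not shouldforcecommit(True, 'stable', 'stable')    # nothing would be lost
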
@@ -569,7 +628,7 @@
         merge.update(repo, p1, False, True, False)
     else:
         repo.ui.debug(" already in target\n")
-    repo.dirstate.write()
+    repo.dirstate.write(repo.currenttransaction())
     repo.ui.debug(" merge against %d:%s\n" % (rev, repo[rev]))
     if base is not None:
         repo.ui.debug("   detach base %d:%s\n" % (base, repo[base]))
@@ -609,7 +668,7 @@
     elif p1n in state:
         if state[p1n] == nullmerge:
             p1 = target
-        elif state[p1n] == revignored:
+        elif state[p1n] in (revignored, revprecursor):
             p1 = nearestrebased(repo, p1n, state)
             if p1 is None:
                 p1 = target
@@ -625,7 +684,7 @@
         if p2n in state:
             if p1 == target: # p1n in targetancestors or external
                 p1 = state[p2n]
-            elif state[p2n] == revignored:
+            elif state[p2n] in (revignored, revprecursor):
                 p2 = nearestrebased(repo, p2n, state)
                 if p2 is None:
                     # no ancestors rebased yet, detach
@@ -634,7 +693,7 @@
                 p2 = state[p2n]
         else: # p2n external
             if p2 != nullrev: # p1n external too => rev is a merged revision
-                raise util.Abort(_('cannot use revision %d as base, result '
+                raise error.Abort(_('cannot use revision %d as base, result '
                         'would have 3 parents') % rev)
             p2 = p2n
     repo.ui.debug(" future parents are %d and %d\n" %
@@ -786,13 +845,14 @@
 
 def restorestatus(repo):
     'Restore a previously stored status'
+    keepbranches = None
+    target = None
+    collapse = False
+    external = nullrev
+    activebookmark = None
+    state = {}
+
     try:
-        keepbranches = None
-        target = None
-        collapse = False
-        external = nullrev
-        activebookmark = None
-        state = {}
         f = repo.vfs("rebasestate")
         for i, l in enumerate(f.read().splitlines()):
             if i == 0:
@@ -813,7 +873,8 @@
                 activebookmark = l
             else:
                 oldrev, newrev = l.split(':')
-                if newrev in (str(nullmerge), str(revignored)):
+                if newrev in (str(nullmerge), str(revignored),
+                              str(revprecursor)):
                     state[repo[oldrev].rev()] = int(newrev)
                 elif newrev == nullid:
                     state[repo[oldrev].rev()] = revtodo
@@ -821,27 +882,28 @@
                 else:
                     state[repo[oldrev].rev()] = repo[newrev].rev()
 
-        if keepbranches is None:
-            raise util.Abort(_('.hg/rebasestate is incomplete'))
-
-        skipped = set()
-        # recompute the set of skipped revs
-        if not collapse:
-            seen = set([target])
-            for old, new in sorted(state.items()):
-                if new != revtodo and new in seen:
-                    skipped.add(old)
-                seen.add(new)
-        repo.ui.debug('computed skipped revs: %s\n' %
-                      (' '.join(str(r) for r in sorted(skipped)) or None))
-        repo.ui.debug('rebase status resumed\n')
-        _setrebasesetvisibility(repo, state.keys())
-        return (originalwd, target, state, skipped,
-                collapse, keep, keepbranches, external, activebookmark)
     except IOError as err:
         if err.errno != errno.ENOENT:
             raise
-        raise util.Abort(_('no rebase in progress'))
+        raise error.Abort(_('no rebase in progress'))
+
+    if keepbranches is None:
+        raise error.Abort(_('.hg/rebasestate is incomplete'))
+
+    skipped = set()
+    # recompute the set of skipped revs
+    if not collapse:
+        seen = set([target])
+        for old, new in sorted(state.items()):
+            if new != revtodo and new in seen:
+                skipped.add(old)
+            seen.add(new)
+    repo.ui.debug('computed skipped revs: %s\n' %
+                    (' '.join(str(r) for r in sorted(skipped)) or None))
+    repo.ui.debug('rebase status resumed\n')
+    _setrebasesetvisibility(repo, state.keys())
+    return (originalwd, target, state, skipped,
+            collapse, keep, keepbranches, external, activebookmark)
 
 def needupdate(repo, state):
     '''check whether we should `update --clean` away from a merge, or if
@@ -865,43 +927,50 @@
 
     activebookmark: the name of the bookmark that should be active after the
         restore'''
-    dstates = [s for s in state.values() if s >= 0]
-    immutable = [d for d in dstates if not repo[d].mutable()]
-    cleanup = True
-    if immutable:
-        repo.ui.warn(_("warning: can't clean up public changesets %s\n")
-                     % ', '.join(str(repo[r]) for r in immutable),
-                     hint=_('see "hg help phases" for details'))
-        cleanup = False
 
-    descendants = set()
-    if dstates:
-        descendants = set(repo.changelog.descendants(dstates))
-    if descendants - set(dstates):
-        repo.ui.warn(_("warning: new changesets detected on target branch, "
-                       "can't strip\n"))
-        cleanup = False
+    try:
+        # If the first commits in the rebased set get skipped during the rebase,
+        # their values within the state mapping will be the target rev id. The
+        # dstates list must not contain the target rev (issue4896)
+        dstates = [s for s in state.values() if s >= 0 and s != target]
+        immutable = [d for d in dstates if not repo[d].mutable()]
+        cleanup = True
+        if immutable:
+            repo.ui.warn(_("warning: can't clean up public changesets %s\n")
+                        % ', '.join(str(repo[r]) for r in immutable),
+                        hint=_('see "hg help phases" for details'))
+            cleanup = False
 
-    if cleanup:
-        # Update away from the rebase if necessary
-        if needupdate(repo, state):
-            merge.update(repo, originalwd, False, True, False)
+        descendants = set()
+        if dstates:
+            descendants = set(repo.changelog.descendants(dstates))
+        if descendants - set(dstates):
+            repo.ui.warn(_("warning: new changesets detected on target branch, "
+                        "can't strip\n"))
+            cleanup = False
+
+        if cleanup:
+            # Update away from the rebase if necessary
+            if needupdate(repo, state):
+                merge.update(repo, originalwd, False, True, False)
 
-        # Strip from the first rebased revision
-        rebased = filter(lambda x: x >= 0 and x != target, state.values())
-        if rebased:
-            strippoints = [c.node()  for c in repo.set('roots(%ld)', rebased)]
-            # no backup of rebased cset versions needed
-            repair.strip(repo.ui, repo, strippoints)
+            # Strip from the first rebased revision
+            rebased = filter(lambda x: x >= 0 and x != target, state.values())
+            if rebased:
+                strippoints = [
+                        c.node()  for c in repo.set('roots(%ld)', rebased)]
+                # no backup of rebased cset versions needed
+                repair.strip(repo.ui, repo, strippoints)
 
-    if activebookmark and activebookmark in repo._bookmarks:
-        bookmarks.activate(repo, activebookmark)
+        if activebookmark and activebookmark in repo._bookmarks:
+            bookmarks.activate(repo, activebookmark)
 
-    clearstatus(repo)
-    repo.ui.warn(_('rebase aborted\n'))
+    finally:
+        clearstatus(repo)
+        repo.ui.warn(_('rebase aborted\n'))
     return 0
 
-def buildstate(repo, dest, rebaseset, collapse):
+def buildstate(repo, dest, rebaseset, collapse, obsoletenotrebased):
     '''Define which revisions are going to be rebased and where
 
     repo: repo
@@ -915,18 +984,18 @@
     # a partially completed rebase is blocked by mq.
     if 'qtip' in repo.tags() and (dest.node() in
                             [s.node for s in repo.mq.applied]):
-        raise util.Abort(_('cannot rebase onto an applied mq patch'))
+        raise error.Abort(_('cannot rebase onto an applied mq patch'))
 
     roots = list(repo.set('roots(%ld)', rebaseset))
     if not roots:
-        raise util.Abort(_('no matching revisions'))
+        raise error.Abort(_('no matching revisions'))
     roots.sort()
     state = {}
     detachset = set()
     for root in roots:
         commonbase = root.ancestor(dest)
         if commonbase == root:
-            raise util.Abort(_('source is ancestor of destination'))
+            raise error.Abort(_('source is ancestor of destination'))
         if commonbase == dest:
             samebranch = root.branch() == dest.branch()
             if not collapse and samebranch and root in dest.children():
@@ -988,6 +1057,8 @@
         rebasedomain = set(repo.revs('%ld::%ld', rebaseset, rebaseset))
         for ignored in set(rebasedomain) - set(rebaseset):
             state[ignored] = revignored
+    for r in obsoletenotrebased:
+        state[r] = revprecursor
     return repo['.'].rev(), dest.rev(), state
 
 def clearrebased(ui, repo, state, skipped, collapsedas=None):
@@ -1068,7 +1139,7 @@
             release(lock, wlock)
     else:
         if opts.get('tool'):
-            raise util.Abort(_('--tool can only be used with --rebase'))
+            raise error.Abort(_('--tool can only be used with --rebase'))
         orig(ui, repo, *args, **opts)
 
 def _setrebasesetvisibility(repo, revs):
@@ -1096,6 +1167,36 @@
     blockers.update(getattr(repo, '_rebaseset', ()))
     return blockers
 
+def _computeobsoletenotrebased(repo, rebasesetrevs, dest):
+    """return a mapping obsolete => successor for all obsolete nodes to be
+    rebased that have a successor in the destination"""
+    obsoletenotrebased = {}
+
+    # Build a mapping successor => obsolete nodes for the obsolete
+    # nodes to be rebased
+    allsuccessors = {}
+    cl = repo.changelog
+    for r in rebasesetrevs:
+        n = repo[r]
+        if n.obsolete():
+            node = cl.node(r)
+            for s in obsolete.allsuccessors(repo.obsstore, [node]):
+                try:
+                    allsuccessors[cl.rev(s)] = cl.rev(node)
+                except LookupError:
+                    pass
+
+    if allsuccessors:
+        # Look for successors of obsolete nodes to be rebased among
+        # the ancestors of dest
+        ancs = cl.ancestors([repo[dest].rev()],
+                            stoprev=min(allsuccessors),
+                            inclusive=True)
+        for s in allsuccessors:
+            if s in ancs:
+                obsoletenotrebased[allsuccessors[s]] = s
+    return obsoletenotrebased
+
 def summaryhook(ui, repo):
     if not os.path.exists(repo.join('rebasestate')):
         return
@@ -1126,3 +1227,4 @@
          _("use 'hg rebase --continue' or 'hg rebase --abort'")])
     # ensure rebased rev are not hidden
     extensions.wrapfunction(repoview, '_getdynamicblockers', _rebasedvisible)
+    revset.symbols['_destrebase'] = _revsetdestrebase
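
The obsolescence-aware skipping introduced throughout the rebase diff above hinges on _computeobsoletenotrebased(). A self-contained sketch of the same two-step lookup, using plain dicts and sets in place of the changelog and obsstore (all names here are illustrative, not Mercurial APIs):

    def obsoletenotrebased(rebaseset, successors, destancestors):
        """rebaseset: revs selected for rebase
        successors: rev -> iterable of successor revs (only for obsolete revs)
        destancestors: revs that are ancestors of the rebase destination
        returns {obsolete rev: its successor already present in the destination}"""
        # step 1: invert the edges, successor -> obsolete precursor
        allsuccessors = {}
        for r in rebaseset:
            for s in successors.get(r, ()):
                allsuccessors[s] = r
        # step 2: keep only successors the destination already contains
        return dict((allsuccessors[s], s)
                    for s in allsuccessors if s in destancestors)

    # rev 3 is obsolete and its successor 7 is already an ancestor of the
    # destination, so rev 3 is marked revprecursor ("already in destination"):
    assert obsoletenotrebased({2, 3}, {3: [7]}, {0, 1, 7}) == {3: 7}
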
--- a/hgext/record.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/record.py	Tue Oct 20 15:59:10 2015 -0500
@@ -9,7 +9,7 @@
 
 from mercurial.i18n import _
 from mercurial import cmdutil, commands, extensions
-from mercurial import util
+from mercurial import error
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
@@ -54,7 +54,7 @@
     This command is not available when committing a merge.'''
 
     if not ui.interactive():
-        raise util.Abort(_('running non-interactively, use %s instead') %
+        raise error.Abort(_('running non-interactively, use %s instead') %
                          'commit')
 
     opts["interactive"] = True
@@ -99,7 +99,7 @@
     try:
         mq = extensions.find('mq')
     except KeyError:
-        raise util.Abort(_("'mq' extension not loaded"))
+        raise error.Abort(_("'mq' extension not loaded"))
 
     repo.mq.checkpatchname(patch)
 
--- a/hgext/relink.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/relink.py	Tue Oct 20 15:59:10 2015 -0500
@@ -7,7 +7,7 @@
 
 """recreates hardlinks between repository clones"""
 
-from mercurial import cmdutil, hg, util
+from mercurial import cmdutil, hg, util, error
 from mercurial.i18n import _
 import os, stat
 
@@ -47,7 +47,7 @@
     """
     if (not util.safehasattr(util, 'samefile') or
         not util.safehasattr(util, 'samedevice')):
-        raise util.Abort(_('hardlinks are not supported on this system'))
+        raise error.Abort(_('hardlinks are not supported on this system'))
     src = hg.repository(repo.baseui, ui.expandpath(origin or 'default-relink',
                                           origin or 'default'))
     ui.status(_('relinking %s to %s\n') % (src.store.path, repo.store.path))
@@ -57,7 +57,7 @@
 
     if not util.samedevice(src.store.path, repo.store.path):
         # No point in continuing
-        raise util.Abort(_('source and destination are on different devices'))
+        raise error.Abort(_('source and destination are on different devices'))
 
     locallock = repo.lock()
     try:
@@ -84,7 +84,7 @@
     total = live * 3 // 2
     src = src.store.path
     pos = 0
-    ui.status(_("tip has %d files, estimated total number of files: %s\n")
+    ui.status(_("tip has %d files, estimated total number of files: %d\n")
               % (live, total))
     for dirpath, dirnames, filenames in os.walk(src):
         dirnames.sort()
@@ -114,7 +114,7 @@
             return False
         if not util.samedevice(src, dst):
             # No point in continuing
-            raise util.Abort(
+            raise error.Abort(
                 _('source and destination are on different devices'))
         if st.st_size != ts.st_size:
             return False
--- a/hgext/schemes.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/schemes.py	Tue Oct 20 15:59:10 2015 -0500
@@ -41,7 +41,7 @@
 """
 
 import os, re
-from mercurial import extensions, hg, templater, util
+from mercurial import extensions, hg, templater, util, error
 from mercurial.i18n import _
 
 # Note for extension authors: ONLY specify testedwith = 'internal' for
@@ -69,7 +69,7 @@
         try:
             url = url.split('://', 1)[1]
         except IndexError:
-            raise util.Abort(_("no '://' in scheme url '%s'") % url)
+            raise error.Abort(_("no '://' in scheme url '%s'") % url)
         parts = url.split('/', self.parts)
         if len(parts) > self.parts:
             tail = parts[-1]
@@ -101,7 +101,7 @@
     for scheme, url in schemes.items():
         if (os.name == 'nt' and len(scheme) == 1 and scheme.isalpha()
             and os.path.exists('%s:\\' % scheme)):
-            raise util.Abort(_('custom scheme %s:// conflicts with drive '
+            raise error.Abort(_('custom scheme %s:// conflicts with drive '
                                'letter %s:\\\n') % (scheme, scheme.upper()))
         hg.schemes[scheme] = ShortRepository(url, scheme, t)
 
--- a/hgext/share.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/share.py	Tue Oct 20 15:59:10 2015 -0500
@@ -38,7 +38,7 @@
 '''
 
 from mercurial.i18n import _
-from mercurial import cmdutil, commands, hg, util, extensions, bookmarks
+from mercurial import cmdutil, commands, hg, util, extensions, bookmarks, error
 from mercurial.hg import repository, parseurl
 import errno
 
@@ -83,7 +83,7 @@
     """
 
     if not repo.shared():
-        raise util.Abort(_("this is not a shared repo"))
+        raise error.Abort(_("this is not a shared repo"))
 
     destlock = lock = None
     lock = repo.lock()
--- a/hgext/shelve.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/shelve.py	Tue Oct 20 15:59:10 2015 -0500
@@ -27,7 +27,7 @@
 from mercurial.node import nullid, nullrev, bin, hex
 from mercurial import changegroup, cmdutil, scmutil, phases, commands
 from mercurial import error, hg, mdiff, merge, patch, repair, util
-from mercurial import templatefilters, exchange, bundlerepo
+from mercurial import templatefilters, exchange, bundlerepo, bundle2
 from mercurial import lock as lockmod
 from hgext import rebase
 import errno
@@ -90,23 +90,40 @@
         except IOError as err:
             if err.errno != errno.ENOENT:
                 raise
-            raise util.Abort(_("shelved change '%s' not found") % self.name)
+            raise error.Abort(_("shelved change '%s' not found") % self.name)
 
     def applybundle(self):
         fp = self.opener()
         try:
             gen = exchange.readbundle(self.repo.ui, fp, self.fname, self.vfs)
-            changegroup.addchangegroup(self.repo, gen, 'unshelve',
-                                       'bundle:' + self.vfs.join(self.fname),
-                                       targetphase=phases.secret)
+            if not isinstance(gen, bundle2.unbundle20):
+                gen.apply(self.repo, 'unshelve',
+                          'bundle:' + self.vfs.join(self.fname),
+                          targetphase=phases.secret)
+            if isinstance(gen, bundle2.unbundle20):
+                bundle2.applybundle(self.repo, gen,
+                                    self.repo.currenttransaction(),
+                                    source='unshelve',
+                                    url='bundle:' + self.vfs.join(self.fname))
         finally:
             fp.close()
 
     def bundlerepo(self):
         return bundlerepo.bundlerepository(self.repo.baseui, self.repo.root,
                                            self.vfs.join(self.fname))
-    def writebundle(self, cg):
-        changegroup.writebundle(self.ui, cg, self.fname, 'HG10UN', self.vfs)
+    def writebundle(self, bases, node):
+        btype = 'HG10BZ'
+        cgversion = '01'
+        compression = None
+        if 'generaldelta' in self.repo.requirements:
+            btype = 'HG20'
+            cgversion = '02'
+            compression = 'BZ'
+
+        cg = changegroup.changegroupsubset(self.repo, bases, [node], 'shelve',
+                                           version=cgversion)
+        changegroup.writebundle(self.ui, cg, self.fname, btype, self.vfs,
+                                compression=compression)
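
The rewritten writebundle() chooses the on-disk bundle format from the repository requirements before delegating to changegroup.writebundle(); the decision on its own, as a standalone sketch (values copied from the hunk above, not a drop-in replacement):

    def shelvebundleformat(requirements):
        """map repo requirements to (bundle type, changegroup version, compression)"""
        if 'generaldelta' in requirements:
            # bundle2 container with version-02 changegroups, bzip2-compressed parts
            return 'HG20', '02', 'BZ'
        # legacy bundle1; HG10BZ already implies bzip2, so no separate compression
        return 'HG10BZ', '01', None

    assert shelvebundleformat({'revlogv1', 'generaldelta'}) == ('HG20', '02', 'BZ')
    assert shelvebundleformat({'revlogv1', 'store'}) == ('HG10BZ', '01', None)
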
 
 class shelvedstate(object):
     """Handle persistence during unshelving operations.
@@ -124,7 +141,7 @@
             version = int(fp.readline().strip())
 
             if version != cls._version:
-                raise util.Abort(_('this version of shelve is incompatible '
+                raise error.Abort(_('this version of shelve is incompatible '
                                    'with the version used in this repo'))
             name = fp.readline().strip()
             wctx = fp.readline().strip()
@@ -179,11 +196,37 @@
                 if err.errno != errno.ENOENT:
                     raise
 
+def _aborttransaction(repo):
+    '''Abort the current transaction for shelve/unshelve, but keep the dirstate
+    '''
+    backupname = 'dirstate.shelve'
+    dirstatebackup = None
+    try:
+        # create a backup of the (un)shelved dirstate, because aborting the
+        # transaction should restore the dirstate to the one at the beginning
+        # of the transaction, which doesn't include the result of (un)shelving
+        fp = repo.vfs.open(backupname, "w")
+        dirstatebackup = backupname
+        # clearing _dirty/_dirtypl of dirstate by _writedirstate below
+        # is unintentional, but it doesn't cause a problem in this case,
+        # because no code path refers to them until the transaction is aborted.
+        repo.dirstate._writedirstate(fp) # write in-memory changes forcibly
+
+        tr = repo.currenttransaction()
+        tr.abort()
+
+        # restore the backed-up dirstate
+        repo.vfs.rename(dirstatebackup, 'dirstate')
+        dirstatebackup = None
+    finally:
+        if dirstatebackup:
+            repo.vfs.unlink(dirstatebackup)
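
_aborttransaction() above is an instance of a generic "keep this file across a transaction abort" idea: snapshot the dirstate, abort, then put the snapshot back. The same pattern with plain files and no Mercurial objects (illustrative only):

    import os
    import shutil

    def abortkeeping(statefile, abort):
        """run abort() but preserve statefile's current content across it"""
        backup = statefile + '.bak'
        shutil.copyfile(statefile, backup)    # snapshot what must survive
        try:
            abort()                           # may rewrite or truncate statefile
            os.rename(backup, statefile)      # reinstate the snapshot
            backup = None
        finally:
            if backup and os.path.exists(backup):
                os.unlink(backup)             # never leave the temp file behind
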
+
 def createcmd(ui, repo, pats, opts):
     """subcommand that creates a new shelve"""
 
-    def publicancestors(ctx):
-        """Compute the public ancestors of a commit.
+    def mutableancestors(ctx):
+        """return all mutable ancestors for ctx (included)
 
         Much faster than the revset ancestors(ctx) & draft()"""
         seen = set([nullrev])
@@ -202,7 +245,7 @@
     wctx = repo[None]
     parents = wctx.parents()
     if len(parents) > 1:
-        raise util.Abort(_('cannot shelve while merging'))
+        raise error.Abort(_('cannot shelve while merging'))
     parent = parents[0]
 
     # we never need the user, so we use a generic user for all shelve operations
@@ -242,34 +285,33 @@
 
     name = opts['name']
 
-    wlock = lock = tr = bms = None
+    wlock = lock = tr = None
     try:
         wlock = repo.wlock()
         lock = repo.lock()
 
-        bms = repo._bookmarks.copy()
         # use an uncommitted transaction to generate the bundle to avoid
         # pull races. ensure we don't print the abort message to stderr.
         tr = repo.transaction('commit', report=lambda x: None)
 
         if name:
             if shelvedfile(repo, name, 'hg').exists():
-                raise util.Abort(_("a shelved change named '%s' already exists")
-                                 % name)
+                raise error.Abort(_("a shelved change named '%s' already exists"
+                                   ) % name)
         else:
             for n in gennames():
                 if not shelvedfile(repo, n, 'hg').exists():
                     name = n
                     break
             else:
-                raise util.Abort(_("too many shelved changes named '%s'") %
+                raise error.Abort(_("too many shelved changes named '%s'") %
                                  label)
 
         # ensure we are not creating a subdirectory or a hidden file
         if '/' in name or '\\' in name:
-            raise util.Abort(_('shelved change names may not contain slashes'))
+            raise error.Abort(_('shelved change names may not contain slashes'))
         if name.startswith('.'):
-            raise util.Abort(_("shelved change names may not start with '.'"))
+            raise error.Abort(_("shelved change names may not start with '.'"))
         interactive = opts.get('interactive', False)
 
         def interactivecommitfunc(ui, repo, *pats, **opts):
@@ -290,9 +332,8 @@
                 ui.status(_("nothing changed\n"))
             return 1
 
-        bases = list(publicancestors(repo[node]))
-        cg = changegroup.changegroupsubset(repo, bases, [node], 'shelve')
-        shelvedfile(repo, name, 'hg').writebundle(cg)
+        bases = list(mutableancestors(repo[node]))
+        shelvedfile(repo, name, 'hg').writebundle(bases, node)
         cmdutil.export(repo, [node],
                        fp=shelvedfile(repo, name, 'patch').opener('wb'),
                        opts=mdiff.diffopts(git=True))
@@ -302,14 +343,10 @@
             desc = util.ellipsis(desc, ui.termwidth())
         ui.status(_('shelved as %s\n') % name)
         hg.update(repo, parent.node())
+
+        _aborttransaction(repo)
     finally:
-        if bms:
-            # restore old bookmarks
-            repo._bookmarks.update(bms)
-            repo._bookmarks.write()
-        if tr:
-            tr.abort()
-        lockmod.release(lock, wlock)
+        lockmod.release(tr, lock, wlock)
 
 def cleanupcmd(ui, repo):
     """subcommand that deletes all shelves"""
@@ -328,7 +365,7 @@
 def deletecmd(ui, repo, pats):
     """subcommand that deletes a specific shelve"""
     if not pats:
-        raise util.Abort(_('no shelved changes specified!'))
+        raise error.Abort(_('no shelved changes specified!'))
     wlock = repo.wlock()
     try:
         for name in pats:
@@ -338,7 +375,7 @@
     except OSError as err:
         if err.errno != errno.ENOENT:
             raise
-        raise util.Abort(_("shelved change '%s' not found") % name)
+        raise error.Abort(_("shelved change '%s' not found") % name)
     finally:
         lockmod.release(wlock)
 
@@ -410,18 +447,18 @@
 def singlepatchcmds(ui, repo, pats, opts, subcommand):
     """subcommand that displays a single shelf"""
     if len(pats) != 1:
-        raise util.Abort(_("--%s expects a single shelf") % subcommand)
+        raise error.Abort(_("--%s expects a single shelf") % subcommand)
     shelfname = pats[0]
 
     if not shelvedfile(repo, shelfname, 'patch').exists():
-        raise util.Abort(_("cannot find shelf %s") % shelfname)
+        raise error.Abort(_("cannot find shelf %s") % shelfname)
 
     listcmd(ui, repo, pats, opts)
 
 def checkparents(repo, state):
     """check parent while resuming an unshelve"""
     if state.parents != repo.dirstate.parents():
-        raise util.Abort(_('working directory parents do not match unshelve '
+        raise error.Abort(_('working directory parents do not match unshelve '
                            'state'))
 
 def pathtofiles(repo, files):
@@ -451,9 +488,9 @@
         mergefiles(ui, repo, state.wctx, state.pendingctx)
 
         repair.strip(ui, repo, state.stripnodes, backup=False, topic='shelve')
+    finally:
         shelvedstate.clear(repo)
         ui.warn(_("unshelve of '%s' aborted\n") % state.name)
-    finally:
         lockmod.release(lock, wlock)
 
 def mergefiles(ui, repo, wctx, shelvectx):
@@ -496,7 +533,7 @@
         checkparents(repo, state)
         ms = merge.mergestate(repo)
         if [f for f in ms if ms[f] == 'u']:
-            raise util.Abort(
+            raise error.Abort(
                 _("unresolved conflicts, can't continue"),
                 hint=_("see 'hg resolve', then 'hg unshelve --continue'"))
 
@@ -579,9 +616,9 @@
 
     if abortf or continuef:
         if abortf and continuef:
-            raise util.Abort(_('cannot use both abort and continue'))
+            raise error.Abort(_('cannot use both abort and continue'))
         if shelved:
-            raise util.Abort(_('cannot combine abort/continue with '
+            raise error.Abort(_('cannot combine abort/continue with '
                                'naming a shelved change'))
 
         try:
@@ -589,25 +626,25 @@
         except IOError as err:
             if err.errno != errno.ENOENT:
                 raise
-            raise util.Abort(_('no unshelve operation underway'))
+            raise error.Abort(_('no unshelve operation underway'))
 
         if abortf:
             return unshelveabort(ui, repo, state, opts)
         elif continuef:
             return unshelvecontinue(ui, repo, state, opts)
     elif len(shelved) > 1:
-        raise util.Abort(_('can only unshelve one change at a time'))
+        raise error.Abort(_('can only unshelve one change at a time'))
     elif not shelved:
         shelved = listshelves(repo)
         if not shelved:
-            raise util.Abort(_('no shelved changes to apply!'))
+            raise error.Abort(_('no shelved changes to apply!'))
         basename = util.split(shelved[0][1])[1]
         ui.status(_("unshelving change '%s'\n") % basename)
     else:
         basename = shelved[0]
 
     if not shelvedfile(repo, basename, 'patch').exists():
-        raise util.Abort(_("shelved change '%s' not found") % basename)
+        raise error.Abort(_("shelved change '%s' not found") % basename)
 
     oldquiet = ui.quiet
     wlock = lock = tr = None
@@ -700,6 +737,8 @@
         repo.unfiltered().changelog.strip(oldtiprev, tr)
 
         unshelvecleanup(ui, repo, basename, opts)
+
+        _aborttransaction(repo)
     finally:
         ui.quiet = oldquiet
         if tr:
@@ -775,12 +814,12 @@
         if opts[opt]:
             for i, allowable in allowables:
                 if opts[i] and opt not in allowable:
-                    raise util.Abort(_("options '--%s' and '--%s' may not be "
+                    raise error.Abort(_("options '--%s' and '--%s' may not be "
                                        "used together") % (opt, i))
             return True
     if checkopt('cleanup'):
         if pats:
-            raise util.Abort(_("cannot specify names when using '--cleanup'"))
+            raise error.Abort(_("cannot specify names when using '--cleanup'"))
         return cleanupcmd(ui, repo)
     elif checkopt('delete'):
         return deletecmd(ui, repo, pats)
--- a/hgext/strip.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/strip.py	Tue Oct 20 15:59:10 2015 -0500
@@ -6,7 +6,7 @@
 from mercurial.i18n import _
 from mercurial.node import nullid
 from mercurial.lock import release
-from mercurial import cmdutil, hg, scmutil, util
+from mercurial import cmdutil, hg, scmutil, util, error
 from mercurial import repair, bookmarks, merge
 
 cmdtable = {}
@@ -38,10 +38,10 @@
     if not force:
         if s.modified or s.added or s.removed or s.deleted:
             _("local changes found") # i18n tool detection
-            raise util.Abort(_("local changes found" + excsuffix))
+            raise error.Abort(_("local changes found" + excsuffix))
         if checksubstate(repo):
             _("local changed subrepos found") # i18n tool detection
-            raise util.Abort(_("local changed subrepos found" + excsuffix))
+            raise error.Abort(_("local changed subrepos found" + excsuffix))
     return s
 
 def strip(ui, repo, revs, update=True, backup=True, force=None, bookmark=None):
@@ -58,7 +58,7 @@
                 and p2 in [x.node for x in repo.mq.applied]):
                 urev = p2
             hg.clean(repo, urev)
-            repo.dirstate.write()
+            repo.dirstate.write(repo.currenttransaction())
 
         repair.strip(ui, repo, revs, backup)
 
@@ -131,7 +131,7 @@
             mark = opts.get('bookmark')
             marks = repo._bookmarks
             if mark not in marks:
-                raise util.Abort(_("bookmark '%s' not found") % mark)
+                raise error.Abort(_("bookmark '%s' not found") % mark)
 
             # If the requested bookmark is not the only one pointing to a
             # a revision we have to only delete the bookmark and not strip
@@ -142,10 +142,7 @@
                     uniquebm = False
                     break
             if uniquebm:
-                rsrevs = repo.revs("ancestors(bookmark(%s)) - "
-                                   "ancestors(head() and not bookmark(%s)) - "
-                                   "ancestors(bookmark() and not bookmark(%s))",
-                                   mark, mark, mark)
+                rsrevs = repair.stripbmrevset(repo, mark)
                 revs.update(set(rsrevs))
             if not revs:
                 del marks[mark]
@@ -153,7 +150,7 @@
                 ui.write(_("bookmark '%s' deleted\n") % mark)
 
         if not revs:
-            raise util.Abort(_('empty revision set'))
+            raise error.Abort(_('empty revision set'))
 
         descendants = set(cl.descendants(revs))
         strippedrevs = revs.union(descendants)
@@ -208,7 +205,7 @@
             changedfiles.extend(dirchanges)
 
             repo.dirstate.rebuild(urev, uctx.manifest(), changedfiles)
-            repo.dirstate.write()
+            repo.dirstate.write(repo.currenttransaction())
 
             # clear resolve state
             ms = merge.mergestate(repo)
--- a/hgext/transplant.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/transplant.py	Tue Oct 20 15:59:10 2015 -0500
@@ -117,18 +117,19 @@
                 return True
         return False
 
-    def apply(self, repo, source, revmap, merges, opts={}):
+    def apply(self, repo, source, revmap, merges, opts=None):
         '''apply the revisions in revmap one by one in revision order'''
+        if opts is None:
+            opts = {}
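
The opts={} to opts=None change is the standard fix for Python's mutable-default-argument pitfall: the default dict is created once, at function definition time, and shared by every call. A standalone illustration:

    def bad(opts={}):
        opts.setdefault('calls', 0)
        opts['calls'] += 1
        return opts['calls']

    def good(opts=None):
        if opts is None:
            opts = {}             # a fresh dict on every call
        opts.setdefault('calls', 0)
        opts['calls'] += 1
        return opts['calls']

    assert bad() == 1 and bad() == 2      # state leaks between calls
    assert good() == 1 and good() == 1    # each call starts clean
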
         revs = sorted(revmap)
         p1, p2 = repo.dirstate.parents()
         pulls = []
         diffopts = patch.difffeatureopts(self.ui, opts)
         diffopts.git = True
 
-        lock = wlock = tr = dsguard = None
+        lock = wlock = tr = None
         try:
             wlock = repo.wlock()
-            dsguard = cmdutil.dirstateguard(repo, 'transplant')
             lock = repo.lock()
             tr = repo.transaction('transplant')
             for rev in revs:
@@ -173,7 +174,7 @@
                     else:
                         parent = source.lookup(opts['parent'])
                         if parent not in parents:
-                            raise util.Abort(_('%s is not a parent of %s') %
+                            raise error.Abort(_('%s is not a parent of %s') %
                                              (short(parent), short(node)))
                 else:
                     parent = parents[0]
@@ -201,7 +202,6 @@
                             # Do not rollback, it is up to the user to
                             # fix the merge or cancel everything
                             tr.close()
-                            dsguard.close()
                             raise
                         if n and domerge:
                             self.ui.status(_('%s merged at %s\n') % (revstr,
@@ -214,7 +214,6 @@
                         if patchfile:
                             os.unlink(patchfile)
             tr.close()
-            dsguard.close()
             if pulls:
                 exchange.pull(repo, source.peer(), heads=pulls)
                 merge.update(repo, pulls[-1], False, False, None)
@@ -225,8 +224,6 @@
                 tr.release()
             if lock:
                 lock.release()
-            if dsguard:
-                dsguard.release()
             wlock.release()
 
     def filter(self, filter, node, changelog, patchfile):
@@ -248,7 +245,7 @@
                            environ={'HGUSER': changelog[1],
                                     'HGREVISION': revlog.hex(node),
                                     },
-                           onerr=util.Abort, errprefix=_('filter failed'))
+                           onerr=error.Abort, errprefix=_('filter failed'))
             user, date, msg = self.parselog(file(headerfile))[1:4]
         finally:
             os.unlink(headerfile)
@@ -272,7 +269,7 @@
         self.ui.note('%s %s\n%s\n' % (user, date, message))
 
         if not patchfile and not merge:
-            raise util.Abort(_('can only omit patchfile if merging'))
+            raise error.Abort(_('can only omit patchfile if merging'))
         if patchfile:
             try:
                 files = set()
@@ -335,14 +332,14 @@
         merge = False
 
         if not user or not date or not message or not parents[0]:
-            raise util.Abort(_('transplant log file is corrupt'))
+            raise error.Abort(_('transplant log file is corrupt'))
 
         parent = parents[0]
         if len(parents) > 1:
             if opts.get('parent'):
                 parent = source.lookup(opts['parent'])
                 if parent not in parents:
-                    raise util.Abort(_('%s is not a parent of %s') %
+                    raise error.Abort(_('%s is not a parent of %s') %
                                      (short(parent), short(node)))
             else:
                 merge = True
@@ -352,7 +349,7 @@
         try:
             p1, p2 = repo.dirstate.parents()
             if p1 != parent:
-                raise util.Abort(_('working directory not at transplant '
+                raise error.Abort(_('working directory not at transplant '
                                    'parent %s') % revlog.hex(parent))
             if merge:
                 repo.setparents(p1, parents[1])
@@ -361,7 +358,7 @@
                 n = repo.commit(message, user, date, extra=extra,
                                 editor=self.getcommiteditor())
                 if not n:
-                    raise util.Abort(_('commit failed'))
+                    raise error.Abort(_('commit failed'))
                 if not merge:
                     self.transplants.set(n, node)
             else:
@@ -421,7 +418,7 @@
                 inmsg = True
                 message.append(line)
         if None in (user, date):
-            raise util.Abort(_("filter corrupted changeset (no user or date)"))
+            raise error.Abort(_("filter corrupted changeset (no user or date)"))
         return (node, user, date, '\n'.join(message), parents)
 
     def log(self, user, date, message, p1, p2, merge=False):
@@ -597,18 +594,18 @@
     def checkopts(opts, revs):
         if opts.get('continue'):
             if opts.get('branch') or opts.get('all') or opts.get('merge'):
-                raise util.Abort(_('--continue is incompatible with '
+                raise error.Abort(_('--continue is incompatible with '
                                    '--branch, --all and --merge'))
             return
         if not (opts.get('source') or revs or
                 opts.get('merge') or opts.get('branch')):
-            raise util.Abort(_('no source URL, branch revision or revision '
+            raise error.Abort(_('no source URL, branch revision or revision '
                                'list provided'))
         if opts.get('all'):
             if not opts.get('branch'):
-                raise util.Abort(_('--all requires a branch revision'))
+                raise error.Abort(_('--all requires a branch revision'))
             if revs:
-                raise util.Abort(_('--all is incompatible with a '
+                raise error.Abort(_('--all is incompatible with a '
                                    'revision list'))
 
     checkopts(opts, revs)
@@ -625,13 +622,13 @@
     cmdutil.checkunfinished(repo)
     p1, p2 = repo.dirstate.parents()
     if len(repo) > 0 and p1 == revlog.nullid:
-        raise util.Abort(_('no revision checked out'))
+        raise error.Abort(_('no revision checked out'))
     if not opts.get('continue'):
         if p2 != revlog.nullid:
-            raise util.Abort(_('outstanding uncommitted merges'))
+            raise error.Abort(_('outstanding uncommitted merges'))
         m, a, r, d = repo.status()[:4]
         if m or a or r or d:
-            raise util.Abort(_('outstanding local changes'))
+            raise error.Abort(_('outstanding local changes'))
 
     sourcerepo = opts.get('source')
     if sourcerepo:
--- a/hgext/win32mbcs.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/win32mbcs.py	Tue Oct 20 15:59:10 2015 -0500
@@ -47,7 +47,7 @@
 
 import os, sys
 from mercurial.i18n import _
-from mercurial import util, encoding
+from mercurial import error, encoding
 # Note for extension authors: ONLY specify testedwith = 'internal' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
@@ -105,7 +105,7 @@
         # return value.
         return enc(func(*dec(args), **dec(kwds)))
     except UnicodeError:
-        raise util.Abort(_("[win32mbcs] filename conversion failed with"
+        raise error.Abort(_("[win32mbcs] filename conversion failed with"
                          " %s encoding\n") % (_encoding))
 
 def wrapper(func, args, kwds):
--- a/hgext/win32text.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgext/win32text.py	Tue Oct 20 15:59:10 2015 -0500
@@ -62,7 +62,7 @@
     # warn if already has 'newline' in repository.
     # it might cause unexpected eol conversion.
     # see issue 302:
-    #   http://mercurial.selenic.com/bts/issue302
+    #   https://bz.mercurial-scm.org/302
     if newline in s and ui and filename and repo:
         ui.warn(_('WARNING: %s already has %s line endings\n'
                   'and does not need EOL conversion by the win32text plugin.\n'
@@ -174,4 +174,4 @@
     # deprecated config: win32text.warn
     if ui.configbool('win32text', 'warn', True):
         ui.warn(_("win32text is deprecated: "
-                  "http://mercurial.selenic.com/wiki/Win32TextExtension\n"))
+                  "https://mercurial-scm.org/wiki/Win32TextExtension\n"))
--- a/hgweb.cgi	Thu Oct 08 23:24:38 2015 +0900
+++ b/hgweb.cgi	Tue Oct 20 15:59:10 2015 -0500
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 #
 # An example hgweb CGI script, edit as necessary
-# See also http://mercurial.selenic.com/wiki/PublishingRepositories
+# See also https://mercurial-scm.org/wiki/PublishingRepositories
 
 # Path to repo or hgweb config to serve (see 'hg help hgweb')
 config = "/path/to/repo/or/config"
--- a/i18n/check-translation.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/i18n/check-translation.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,8 +5,15 @@
 import polib
 import re
 
+scanners = []
 checkers = []
 
+def scanner():
+    def decorator(func):
+        scanners.append(func)
+        return func
+    return decorator
+
 def levelchecker(level, msgidpat):
     def decorator(func):
         if msgidpat:
@@ -61,6 +68,46 @@
     if [c for c, i in indices if len(c) == i + 1]:
         yield "msgstr has invalid '&' followed by none"
 
+deprecatedpe = None
+@scanner()
+def deprecatedsetup(pofile):
+    pes = [p for p in pofile if p.msgid == 'DEPRECATED']
+    if len(pes):
+        global deprecatedpe
+        deprecatedpe = pes[0]
+
+@fatalchecker('(DEPRECATED)')
+def deprecated(pe):
+    """Check for DEPRECATED
+    >>> ped = polib.POEntry(
+    ...     msgid = 'DEPRECATED',
+    ...     msgstr= 'DETACERPED')
+    >>> deprecatedsetup([ped])
+    >>> pe = polib.POEntry(
+    ...     msgid = 'Something (DEPRECATED)',
+    ...     msgstr= 'something (DEPRECATED)')
+    >>> match(deprecated, pe)
+    True
+    >>> for e in deprecated(pe): print e
+    >>> pe = polib.POEntry(
+    ...     msgid = 'Something (DEPRECATED)',
+    ...     msgstr= 'something (DETACERPED)')
+    >>> match(deprecated, pe)
+    True
+    >>> for e in deprecated(pe): print e
+    >>> pe = polib.POEntry(
+    ...     msgid = 'Something (DEPRECATED)',
+    ...     msgstr= 'something')
+    >>> match(deprecated, pe)
+    True
+    >>> for e in deprecated(pe): print e
+    msgstr inconsistently translated (DEPRECATED)
+    """
+    if not ('(DEPRECATED)' in pe.msgstr or
+            (deprecatedpe and deprecatedpe.msgstr and
+             deprecatedpe.msgstr in pe.msgstr)):
+        yield "msgstr inconsistently translated (DEPRECATED)"
+
 ####################
 
 def warningchecker(msgidpat=None):
@@ -117,6 +164,8 @@
         return []
 
     detected = []
+    for checker in scanners:
+        checker(pofile)
     for pe in pofile.translated_entries():
         errors = []
         for checker, level in targetcheckers:
--- a/i18n/da.po	Thu Oct 08 23:24:38 2015 +0900
+++ b/i18n/da.po	Tue Oct 20 15:59:10 2015 -0500
@@ -9367,7 +9367,7 @@
 msgstr ""
 
 msgid "DEPRECATED"
-msgstr ""
+msgstr "FORÆLDET"
 
 msgid ""
 "\n"
--- a/i18n/fr.po	Thu Oct 08 23:24:38 2015 +0900
+++ b/i18n/fr.po	Tue Oct 20 15:59:10 2015 -0500
@@ -4014,7 +4014,7 @@
 msgstr "garder le fichier du patch"
 
 msgid "stop managing a revision (DEPRECATED)"
-msgstr "arrêter de gérer une révision"
+msgstr ""
 
 msgid "hg qdelete [-k] [-r REV]... [PATCH]..."
 msgstr "hg qdelete [-k] [-r REV]... [PATCH]..."
--- a/i18n/it.po	Thu Oct 08 23:24:38 2015 +0900
+++ b/i18n/it.po	Tue Oct 20 15:59:10 2015 -0500
@@ -7506,7 +7506,7 @@
 msgstr ""
 
 msgid "DEPRECATED"
-msgstr ""
+msgstr "DEPRECATO"
 
 msgid ""
 "\n"
--- a/i18n/pt_BR.po	Thu Oct 08 23:24:38 2015 +0900
+++ b/i18n/pt_BR.po	Tue Oct 20 15:59:10 2015 -0500
@@ -3821,7 +3821,7 @@
 msgstr "factotum não está respondendo"
 
 msgid "pull, update and merge in one command (DEPRECATED)"
-msgstr "pull, update e merge em um comando (OBSOLETA)"
+msgstr "pull, update e merge em um comando (OBSOLETO)"
 
 msgid "a specific revision you would like to pull"
 msgstr "uma revisão específica que você gostaria de trazer"
@@ -4082,7 +4082,7 @@
 msgstr "revisões de autoria do usuário"
 
 msgid "show only changesets within the given named branch (DEPRECATED)"
-msgstr "mostra apenas revisões dentro do ramo nomeado especificado (OBSOLETA)"
+msgstr "mostra apenas revisões dentro do ramo nomeado especificado (OBSOLETO)"
 
 msgid "BRANCH"
 msgstr "RAMO"
@@ -8342,7 +8342,7 @@
 msgstr "escrevendo"
 
 msgid "show progress bars for some actions (DEPRECATED)"
-msgstr "mostra barras de progresso para algumas ações (OBSOLETA)"
+msgstr "mostra barras de progresso para algumas ações (OBSOLETO)"
 
 msgid ""
 "This extension has been merged into core, you can remove it from your config.\n"
@@ -8505,7 +8505,7 @@
 msgstr "mantém nomes de ramos originais"
 
 msgid "(DEPRECATED)"
-msgstr "(OBSOLETA)"
+msgstr "(OBSOLETO)"
 
 msgid "specify merge tool"
 msgstr "especifica o utilitário de mesclagem"
@@ -11140,7 +11140,7 @@
 msgstr "faz um annotate da revisão especificada"
 
 msgid "follow copies/renames and list the filename (DEPRECATED)"
-msgstr "segue cópias e renomeações e lista o nome de arquivo (OBSOLETA)"
+msgstr "segue cópias e renomeações e lista o nome de arquivo (OBSOLETO)"
 
 msgid "don't follow copies and renames"
 msgstr "não segue cópias e renomeações"
@@ -12862,7 +12862,7 @@
 msgid "backwards compatibility with old bash completion scripts (DEPRECATED)"
 msgstr ""
 "compatibilidade retroativa com antigos scripts bash de completação "
-"(OBSOLETA)"
+"(OBSOLETO)"
 
 msgid "NAME..."
 msgstr "NOME..."
@@ -13783,7 +13783,7 @@
 msgstr "mostra apenas cabeças topológicas"
 
 msgid "show active branchheads only (DEPRECATED)"
-msgstr "mostra apenas cabeças de ramo ativas (OBSOLETA)"
+msgstr "mostra apenas cabeças de ramo ativas (OBSOLETO)"
 
 msgid "show normal and closed branch heads"
 msgstr "mostra cabeças de ramo normais e fechadas"
@@ -15606,7 +15606,7 @@
 msgstr "nome do arquivo de configuração do hgweb (veja \"hg help hgweb\")"
 
 msgid "name of the hgweb config file (DEPRECATED)"
-msgstr "nome do arquivo de configuração do hgweb (OBSOLETA)"
+msgstr "nome do arquivo de configuração do hgweb (OBSOLETO)"
 
 msgid "name of file to write process ID to"
 msgstr "nome do arquivo no qual escrever o ID do processo"
@@ -21151,7 +21151,7 @@
 "    Default is False."
 msgstr ""
 "``allowbz2``\n"
-"    (OBSOLETA) Determina se revisões estarão disponíveis para download\n"
+"    (OBSOLETO) Determina se revisões estarão disponíveis para download\n"
 "    em formato .tar.bz2.\n"
 "    O padrão é False."
 
@@ -21162,7 +21162,7 @@
 "    Default is False."
 msgstr ""
 "``allowgz``\n"
-"    (OBSOLETA) Determina se revisões estarão disponíveis para download\n"
+"    (OBSOLETO) Determina se revisões estarão disponíveis para download\n"
 "    em formato .tar.gz.\n"
 "    O padrão é False."
 
@@ -21223,7 +21223,7 @@
 "    revisions. Default is False. This feature creates temporary files."
 msgstr ""
 "``allowzip``\n"
-"    (OBSOLETA) Determina se revisões estarão disponíveis para download\n"
+"    (OBSOLETO) Determina se revisões estarão disponíveis para download\n"
 "    em formato .zip.\n"
 "    O padrão é False."
 
--- a/i18n/ro.po	Thu Oct 08 23:24:38 2015 +0900
+++ b/i18n/ro.po	Tue Oct 20 15:59:10 2015 -0500
@@ -8063,7 +8063,7 @@
 msgstr "afișează doar capetele topologice"
 
 msgid "show active branchheads only (DEPRECATED)"
-msgstr "afișează doar capetele de ramură active [ÎNVECHIT]"
+msgstr "afișează doar capetele de ramură active (ÎNVECHIT)"
 
 msgid "show normal and closed branch heads"
 msgstr "afișează capetele de ramură normale și închise"
@@ -8259,7 +8259,7 @@
 msgstr "VALOARE"
 
 msgid "DEPRECATED"
-msgstr ""
+msgstr "ÎNVECHIT"
 
 msgid ""
 "\n"
--- a/i18n/sv.po	Thu Oct 08 23:24:38 2015 +0900
+++ b/i18n/sv.po	Tue Oct 20 15:59:10 2015 -0500
@@ -826,7 +826,7 @@
 msgstr "Bugzilla-fel: %s"
 
 msgid "command to display child changesets (DEPRECATED)"
-msgstr "kommando för att visa barnändringar (FÖRLEGAD)"
+msgstr "kommando för att visa barnändringar (FÖRÅLDRAD)"
 
 msgid ""
 "This extension is deprecated. You should use :hg:`log -r\n"
@@ -2741,7 +2741,7 @@
 "följ ändringshistorik, eller filhistorik över kopieringar och namnbyten"
 
 msgid "only follow the first parent of merge changesets (DEPRECATED)"
-msgstr "följ bara den första föräldern vid sammanfogningar (FÖRLEGAD)"
+msgstr "följ bara den första föräldern vid sammanfogningar (FÖRÅLDRAD)"
 
 msgid "show revisions matching date spec"
 msgstr "visa revisioner som matchar datumspecen"
@@ -2759,7 +2759,7 @@
 msgstr "inkludera revisioner där filer togs bort"
 
 msgid "show only merges (DEPRECATED)"
-msgstr "visa bara sammanfogningar (FÖRLEGAD)"
+msgstr "visa bara sammanfogningar (FÖRÅLDRAD)"
 
 msgid "USER"
 msgstr "ANVÄNDARE"
@@ -2768,7 +2768,7 @@
 msgstr "revisioner arkiverade av användare"
 
 msgid "show only changesets within the given named branch (DEPRECATED)"
-msgstr "visa bara ändringar i den namngivna grenen (FÖRLEGAD)"
+msgstr "visa bara ändringar i den namngivna grenen (FÖRÅLDRAD)"
 
 msgid "BRANCH"
 msgstr "GREN"
@@ -2780,7 +2780,7 @@
 msgstr "visa inte revision eller någon av dess föräldrar"
 
 msgid "show hidden changesets (DEPRECATED)"
-msgstr "visa dolda ändringar (FÖRLEGAD)"
+msgstr "visa dolda ändringar (FÖRÅLDRAD)"
 
 msgid "[OPTION]... [FILE]"
 msgstr "[FLAGGA]... [FIL]"
@@ -5011,7 +5011,7 @@
 msgstr "skriv ut namnet på föregående applicerade patch"
 
 msgid "import uncommitted changes (DEPRECATED)"
-msgstr "importera icke arkiverade ändringar (FÖRLEGAD)"
+msgstr "importera icke arkiverade ändringar (FÖRÅLDRAD)"
 
 msgid "add \"From: <current user>\" to patch"
 msgstr "lägg till \"From: <denna användare>\" i patch"
@@ -5306,7 +5306,7 @@
 msgstr "poppa alla patchar"
 
 msgid "queue name to pop (DEPRECATED)"
-msgstr "könamn att poppa (FÖRLEGAD)"
+msgstr "könamn att poppa (FÖRÅLDRAD)"
 
 msgid "forget any local changes to patched files"
 msgstr ""
@@ -5404,7 +5404,7 @@
 "not descendants of REV (DEPRECATED)"
 msgstr ""
 "bunta bara ändringar med lokala revisionsnummer större än REV som inte är "
-"ättling till REV (FÖRLEGAD)"
+"ättling till REV (FÖRÅLDRAD)"
 
 msgid "no backups"
 msgstr "inga säkerhetskopior"
@@ -6537,7 +6537,7 @@
 msgstr ""
 
 msgid "(DEPRECATED)"
-msgstr "(FÖRLEGAD)"
+msgstr "(FÖRÅLDRAD)"
 
 msgid "specify merge tool"
 msgstr "ange sammanfogningsverktyg"
@@ -8088,7 +8088,7 @@
 msgstr "annotera den angivna revisionen"
 
 msgid "follow copies/renames and list the filename (DEPRECATED)"
-msgstr "följ kopieringar/namnbyten och visa filnamnet (FÖRLEGAD)"
+msgstr "följ kopieringar/namnbyten och visa filnamnet (FÖRÅLDRAD)"
 
 msgid "don't follow copies and renames"
 msgstr "följ inte kopieringar och namnbyten"
@@ -8247,7 +8247,7 @@
 msgstr "sammanfoga med gamla dirstate-föräldern efter återkallning"
 
 msgid "parent to choose when backing out merge (DEPRECATED)"
-msgstr "förälder att välja när en sammanfogning återkallas (FÖRLEGAD)"
+msgstr "förälder att välja när en sammanfogning återkallas (FÖRÅLDRAD)"
 
 msgid "revision to backout"
 msgstr "revision att återkalla"
@@ -10105,7 +10105,7 @@
 msgstr "visa bara topologiska huvuden"
 
 msgid "show active branchheads only (DEPRECATED)"
-msgstr "visa bara aktiva grenhuvuden (FÖRLEGAD)"
+msgstr "visa bara aktiva grenhuvuden (FÖRÅLDRAD)"
 
 msgid "show normal and closed branch heads"
 msgstr "visa normala och stängda grenhuvuden"
@@ -11764,7 +11764,7 @@
 msgstr "namn på webdir-konfigurationsfil (se \"hg help hgweb\")"
 
 msgid "name of the hgweb config file (DEPRECATED)"
-msgstr "namn på webdir-konfigurationsfil (FÖRLEGAD)"
+msgstr "namn på webdir-konfigurationsfil (FÖRÅLDRAD)"
 
 msgid "for remote clients"
 msgstr "för fjärrklienter"
@@ -13116,7 +13116,7 @@
 msgstr "VÄRDE"
 
 msgid "DEPRECATED"
-msgstr "FÖRLEGAD"
+msgstr "FÖRÅLDRAD"
 
 msgid ""
 "\n"
--- a/i18n/zh_CN.po	Thu Oct 08 23:24:38 2015 +0900
+++ b/i18n/zh_CN.po	Tue Oct 20 15:59:10 2015 -0500
@@ -5091,7 +5091,7 @@
 msgid "HG: Leave message empty to abort commit."
 msgstr ""
 
-#, fuzzy, python-format
+#, fuzzy, python-format, broken
 msgid "HG: user: %s"
 msgstr "用户: %s\n"
 
@@ -5102,7 +5102,7 @@
 msgid "HG: branch '%s'"
 msgstr ""
 
-#, fuzzy, python-format
+#, fuzzy, python-format, broken
 msgid "HG: subrepo %s"
 msgstr "已删除"
 
@@ -5114,11 +5114,11 @@
 msgid "HG: changed %s"
 msgstr ""
 
-#, fuzzy, python-format
+#, fuzzy, python-format, broken
 msgid "HG: removed %s"
 msgstr "已删除"
 
-#, fuzzy
+#, fuzzy, broken
 msgid "HG: no files changed"
 msgstr "正在增加文件改变\n"
 
@@ -10443,3 +10443,6 @@
 
 msgid "user name not available - set USERNAME environment variable"
 msgstr ""
+
+msgid "DEPRECATED"
+msgstr "不赞成"
--- a/mercurial/ancestor.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/ancestor.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,9 +5,12 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+from __future__ import absolute_import
+
 import collections
 import heapq
-from node import nullrev
+
+from .node import nullrev
 
 def commonancestorsheads(pfunc, *nodes):
     """Returns a set with the heads of all common ancestors of all nodes,
--- a/mercurial/archival.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/archival.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,14 +5,27 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from i18n import _
-import match as matchmod
-import cmdutil
-import scmutil, util, encoding
-import cStringIO, os, tarfile, time, zipfile
-import zlib, gzip
+from __future__ import absolute_import
+
+import cStringIO
+import gzip
+import os
 import struct
-import error
+import tarfile
+import time
+import zipfile
+import zlib
+
+from .i18n import _
+
+from . import (
+    cmdutil,
+    encoding,
+    error,
+    match as matchmod,
+    scmutil,
+    util,
+)
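
ancestor.py above and archival.py here are the first core modules converted to the from __future__ import absolute_import style: standard-library modules are imported bare, while package-internal modules are named explicitly through relative imports collected in one sorted block. A tiny runnable illustration of what absolute_import changes (the relative-import half only works inside a package, so it is shown in comments):

    from __future__ import absolute_import

    # With absolute_import in effect, "import struct" always resolves to the
    # standard library, even if a sibling module named struct.py exists.
    # Package-internal imports are then spelled explicitly, as in the hunk above:
    #     from .i18n import _
    #     from . import (
    #         cmdutil,
    #         error,
    #         ...
    #     )
    import struct

    assert struct.calcsize('>I') == 4
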
 
 # from unzip source code:
 _UNX_IFREG = 0x8000
@@ -42,7 +55,7 @@
     if prefix.startswith('./'):
         prefix = prefix[2:]
     if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
-        raise util.Abort(_('archive prefix contains illegal components'))
+        raise error.Abort(_('archive prefix contains illegal components'))
     return prefix
 
 exts = {
@@ -111,11 +124,7 @@
         def _write_gzip_header(self):
             self.fileobj.write('\037\213')             # magic header
             self.fileobj.write('\010')                 # compression method
-            # Python 2.6 introduced self.name and deprecated self.filename
-            try:
-                fname = self.name
-            except AttributeError:
-                fname = self.filename
+            fname = self.name
             if fname and fname.endswith('.gz'):
                 fname = fname[:-3]
             flags = 0
@@ -283,7 +292,7 @@
 
     if kind == 'files':
         if prefix:
-            raise util.Abort(_('cannot give prefix when archiving to files'))
+            raise error.Abort(_('cannot give prefix when archiving to files'))
     else:
         prefix = tidyprefix(dest, kind, prefix)
 
@@ -294,7 +303,7 @@
         archiver.addfile(prefix + name, mode, islink, data)
 
     if kind not in archivers:
-        raise util.Abort(_("unknown archive type '%s'") % kind)
+        raise error.Abort(_("unknown archive type '%s'") % kind)
 
     ctx = repo[node]
     archiver = archivers[kind](dest, mtime or ctx.date()[0])
--- a/mercurial/base85.c	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/base85.c	Tue Oct 20 15:59:10 2015 -0500
@@ -21,7 +21,7 @@
 static void
 b85prep(void)
 {
-	int i;
+	unsigned i;
 
 	memset(b85dec, 0, sizeof(b85dec));
 	for (i = 0; i < sizeof(b85chars); i++)
--- a/mercurial/bookmarks.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/bookmarks.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,11 +5,22 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+from __future__ import absolute_import
+
+import errno
 import os
-from mercurial.i18n import _
-from mercurial.node import hex, bin
-from mercurial import encoding, util, obsolete, lock as lockmod
-import errno
+
+from .i18n import _
+from .node import (
+    bin,
+    hex,
+)
+from . import (
+    encoding,
+    lock as lockmod,
+    obsolete,
+    util,
+)
 
 class bmstore(dict):
     """Storage for bookmarks.
@@ -79,6 +90,19 @@
         can be copied back on rollback.
         '''
         repo = self._repo
+        if (repo.ui.configbool('devel', 'all-warnings')
+                or repo.ui.configbool('devel', 'check-locks')):
+            l = repo._wlockref and repo._wlockref()
+            if l is None or not l.held:
+                repo.ui.develwarn('bookmarks write with no wlock')
+
+        tr = repo.currenttransaction()
+        if tr:
+            self.recordchange(tr)
+            # invalidatevolatilesets() is omitted because this doesn't
+            # write changes out actually
+            return
+
         self._writerepo(repo)
         repo.invalidatevolatilesets()
 
@@ -147,7 +171,7 @@
 
 def deactivate(repo):
     """
-    Unset the active bookmark in this reposiotry.
+    Unset the active bookmark in this repository.
     """
     wlock = repo.wlock()
     try:
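
The new bmstore.write() above defers persistence to the active transaction (via recordchange) and emits a develwarn when called without the wlock. A hedged sketch of the calling pattern a bookmark-mutating operation is now expected to follow; `repo` is assumed to be a localrepository and the function name is illustrative:

    # Hedged sketch, not code from this changeset.
    def setbookmark(repo, name, node):
        wlock = repo.wlock()                 # without this, write() develwarns
        try:
            tr = repo.transaction('bookmark')
            try:
                repo._bookmarks[name] = node
                repo._bookmarks.write()      # recorded on tr, no disk I/O yet
                tr.close()                   # bookmarks reach disk with the tr
            finally:
                tr.release()
        finally:
            wlock.release()
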
--- a/mercurial/branchmap.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/branchmap.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,13 +5,28 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from node import bin, hex, nullid, nullrev
-import encoding
-import scmutil
-import util
+from __future__ import absolute_import
+
+import array
+import struct
 import time
-from array import array
-from struct import calcsize, pack, unpack
+
+from .node import (
+    bin,
+    hex,
+    nullid,
+    nullrev,
+)
+from . import (
+    encoding,
+    error,
+    scmutil,
+)
+
+array = array.array
+calcsize = struct.calcsize
+pack = struct.pack
+unpack = struct.unpack
 
 def _filename(repo):
     """name of a branchcache file for a given repo or repoview"""
@@ -101,6 +116,38 @@
     assert partial.validfor(repo), filtername
     repo._branchcaches[repo.filtername] = partial
 
+def replacecache(repo, bm):
+    """Replace the branchmap cache for a repo with a branch mapping.
+
+    This is likely only called during clone with a branch map from a remote.
+    """
+    rbheads = []
+    closed = []
+    for bheads in bm.itervalues():
+        rbheads.extend(bheads)
+        for h in bheads:
+            r = repo.changelog.rev(h)
+            b, c = repo.changelog.branchinfo(r)
+            if c:
+                closed.append(h)
+
+    if rbheads:
+        rtiprev = max((int(repo.changelog.rev(node))
+                for node in rbheads))
+        cache = branchcache(bm,
+                            repo[rtiprev].node(),
+                            rtiprev,
+                            closednodes=closed)
+
+        # Try to stick it as low as possible
+        # filters above 'served' are unlikely to be fetched from a clone
+        for candidate in ('base', 'immutable', 'served'):
+            rview = repo.filtered(candidate)
+            if cache.validfor(rview):
+                repo._branchcaches[candidate] = cache
+                cache.write(rview)
+                break
+
 class branchcache(dict):
     """A dict like object that hold branches heads cache.
 
@@ -203,7 +250,7 @@
             repo.ui.log('branchcache',
                         'wrote %s branch cache with %d labels and %d nodes\n',
                         repo.filtername, len(self), nodecount)
-        except (IOError, OSError, util.Abort) as inst:
+        except (IOError, OSError, error.Abort) as inst:
             repo.ui.debug("couldn't write branch cache: %s\n" % inst)
             # Abort may be raised by a read-only opener
             pass
@@ -418,7 +465,7 @@
                                   for b in self._names[self._rbcnamescount:]))
                 self._rbcsnameslen = f.tell()
                 f.close()
-            except (IOError, OSError, util.Abort) as inst:
+            except (IOError, OSError, error.Abort) as inst:
                 repo.ui.debug("couldn't write revision branch cache names: "
                               "%s\n" % inst)
                 return
@@ -436,7 +483,7 @@
                 end = revs * _rbcrecsize
                 f.write(self._rbcrevs[start:end])
                 f.close()
-            except (IOError, OSError, util.Abort) as inst:
+            except (IOError, OSError, error.Abort) as inst:
                 repo.ui.debug("couldn't write revision branch cache: %s\n" %
                               inst)
                 return
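
replacecache() above primes the branchmap cache from a branch mapping received over the wire, attaching it to the narrowest repoview filter it is valid for. A hedged sketch of the clone-time call site it is written for; `remote` is assumed to be a wire peer exposing the standard branchmap command:

    # Hedged sketch, not code from this changeset.
    from mercurial import branchmap

    def primebranchcache(repo, remote):
        rbranchmap = remote.branchmap()      # {branch name: [head nodes]}
        if rbranchmap:
            branchmap.replacecache(repo, rbranchmap)
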
--- a/mercurial/bundle2.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/bundle2.py	Tue Oct 20 15:59:10 2015 -0500
@@ -145,19 +145,25 @@
 preserve.
 """
 
+from __future__ import absolute_import
+
 import errno
-import sys
-import util
-import struct
-import urllib
+import re
 import string
-import obsolete
-import pushkey
-import url
-import re
+import struct
+import sys
+import urllib
 
-import changegroup, error, tags
-from i18n import _
+from .i18n import _
+from . import (
+    changegroup,
+    error,
+    obsolete,
+    pushkey,
+    tags,
+    url,
+    util,
+)
 
 _pack = struct.pack
 _unpack = struct.unpack
@@ -296,6 +302,15 @@
     to be created"""
     raise TransactionUnavailable()
 
+def applybundle(repo, unbundler, tr, source=None, url=None, op=None):
+    # transform me into unbundler.apply() as soon as the freeze is lifted
+    tr.hookargs['bundle2'] = '1'
+    if source is not None and 'source' not in tr.hookargs:
+        tr.hookargs['source'] = source
+    if url is not None and 'url' not in tr.hookargs:
+        tr.hookargs['url'] = url
+    return processbundle(repo, unbundler, lambda: tr, op=op)
+
 def processbundle(repo, unbundler, transactiongetter=None, op=None):
     """This function process a bundle, apply effect to/from a repo
 
@@ -370,17 +385,17 @@
             handler = parthandlermapping.get(part.type)
             if handler is None:
                 status = 'unsupported-type'
-                raise error.UnsupportedPartError(parttype=part.type)
+                raise error.BundleUnknownFeatureError(parttype=part.type)
             indebug(op.ui, 'found a handler for part %r' % part.type)
             unknownparams = part.mandatorykeys - handler.params
             if unknownparams:
                 unknownparams = list(unknownparams)
                 unknownparams.sort()
                 status = 'unsupported-params (%s)' % unknownparams
-                raise error.UnsupportedPartError(parttype=part.type,
-                                               params=unknownparams)
+                raise error.BundleUnknownFeatureError(parttype=part.type,
+                                                      params=unknownparams)
             status = 'supported'
-        except error.UnsupportedPartError as exc:
+        except error.BundleUnknownFeatureError as exc:
             if part.mandatory: # mandatory parts
                 raise
             indebug(op.ui, 'ignoring unsupported advisory part %s' % exc)
@@ -473,6 +488,15 @@
         self._params = []
         self._parts = []
         self.capabilities = dict(capabilities)
+        self._compressor = util.compressors[None]()
+
+    def setcompression(self, alg):
+        """setup core part compression to <alg>"""
+        if alg is None:
+            return
+        assert not any(n.lower() == 'compression' for n, v in self._params)
+        self.addparam('Compression', alg)
+        self._compressor = util.compressors[alg]()
 
     @property
     def nbparts(self):
@@ -524,14 +548,10 @@
         yield _pack(_fstreamparamsize, len(param))
         if param:
             yield param
-
-        outdebug(self.ui, 'start of parts')
-        for part in self._parts:
-            outdebug(self.ui, 'bundle part: "%s"' % part.type)
-            for chunk in part.getchunks(ui=self.ui):
-                yield chunk
-        outdebug(self.ui, 'end of bundle')
-        yield _pack(_fpartheadersize, 0)
+        # starting compression
+        for chunk in self._getcorechunk():
+            yield self._compressor.compress(chunk)
+        yield self._compressor.flush()
 
     def _paramchunk(self):
         """return a encoded version of all stream parameters"""
@@ -544,6 +564,19 @@
             blocks.append(par)
         return ' '.join(blocks)
 
+    def _getcorechunk(self):
+        """yield chunk for the core part of the bundle
+
+        (all but headers and parameters)"""
+        outdebug(self.ui, 'start of parts')
+        for part in self._parts:
+            outdebug(self.ui, 'bundle part: "%s"' % part.type)
+            for chunk in part.getchunks(ui=self.ui):
+                yield chunk
+        outdebug(self.ui, 'end of bundle')
+        yield _pack(_fpartheadersize, 0)
+
+
     def salvageoutput(self):
         """return a list with a copy of all output parts in the bundle
 
@@ -603,10 +636,10 @@
         magicstring = changegroup.readexactly(fp, 4)
     magic, version = magicstring[0:2], magicstring[2:4]
     if magic != 'HG':
-        raise util.Abort(_('not a Mercurial bundle'))
+        raise error.Abort(_('not a Mercurial bundle'))
     unbundlerclass = formatmap.get(version)
     if unbundlerclass is None:
-        raise util.Abort(_('unknown bundle version %s') % version)
+        raise error.Abort(_('unknown bundle version %s') % version)
     unbundler = unbundlerclass(ui, fp)
     indebug(ui, 'start processing of %s stream' % magicstring)
     return unbundler
@@ -617,9 +650,13 @@
     This class is fed with a binary stream and yields parts through its
     `iterparts` methods."""
 
+    _magicstring = 'HG20'
+
     def __init__(self, ui, fp):
         """If header is specified, we do not read it out of the stream."""
         self.ui = ui
+        self._decompressor = util.decompressors[None]
+        self._compressed = None
         super(unbundle20, self).__init__(fp)
 
     @util.propertycache
@@ -632,15 +669,23 @@
             raise error.BundleValueError('negative bundle param size: %i'
                                          % paramssize)
         if paramssize:
-            for p in self._readexact(paramssize).split(' '):
-                p = p.split('=', 1)
-                p = [urllib.unquote(i) for i in p]
-                if len(p) < 2:
-                    p.append(None)
-                self._processparam(*p)
-                params[p[0]] = p[1]
+            params = self._readexact(paramssize)
+            params = self._processallparams(params)
         return params
 
+    def _processallparams(self, paramsblock):
+        """"""
+        params = {}
+        for p in paramsblock.split(' '):
+            p = p.split('=', 1)
+            p = [urllib.unquote(i) for i in p]
+            if len(p) < 2:
+                p.append(None)
+            self._processparam(*p)
+            params[p[0]] = p[1]
+        return params
+
+
     def _processparam(self, name, value):
         """process a parameter, applying its effect if needed
 
@@ -655,18 +700,62 @@
             raise ValueError('empty parameter name')
         if name[0] not in string.letters:
             raise ValueError('non letter first character: %r' % name)
-        # Some logic will be later added here to try to process the option for
-        # a dict of known parameter.
-        if name[0].islower():
-            indebug(self.ui, "ignoring unknown parameter %r" % name)
+        try:
+            handler = b2streamparamsmap[name.lower()]
+        except KeyError:
+            if name[0].islower():
+                indebug(self.ui, "ignoring unknown parameter %r" % name)
+            else:
+                raise error.BundleUnknownFeatureError(params=(name,))
         else:
-            raise error.UnsupportedPartError(params=(name,))
+            handler(self, name, value)
+
+    def _forwardchunks(self):
+        """utility to transfer a bundle2 as binary
+
+        This is made necessary by the fact that the 'getbundle' command over
+        'ssh' has no way to know when the reply ends, relying on the bundle
+        being interpreted to know its end. This is terrible and we are sorry,
+        but we needed to move forward to get general delta enabled.
+        """
+        yield self._magicstring
+        assert 'params' not in vars(self)
+        paramssize = self._unpack(_fstreamparamsize)[0]
+        if paramssize < 0:
+            raise error.BundleValueError('negative bundle param size: %i'
+                                         % paramssize)
+        yield _pack(_fstreamparamsize, paramssize)
+        if paramssize:
+            params = self._readexact(paramssize)
+            self._processallparams(params)
+            yield params
+            assert self._decompressor is util.decompressors[None]
+        # From there, payload might need to be decompressed
+        self._fp = self._decompressor(self._fp)
+        emptycount = 0
+        while emptycount < 2:
+            # so we can brainlessly loop
+            assert _fpartheadersize == _fpayloadsize
+            size = self._unpack(_fpartheadersize)[0]
+            yield _pack(_fpartheadersize, size)
+            if size:
+                emptycount = 0
+            else:
+                emptycount += 1
+                continue
+            if size == flaginterrupt:
+                continue
+            elif size < 0:
+                raise error.BundleValueError('negative chunk size: %i' % size)
+            yield self._readexact(size)
 
 
     def iterparts(self):
         """yield all parts contained in the stream"""
         # make sure param have been loaded
         self.params
+        # From there, payload needs to be decompressed
+        self._fp = self._decompressor(self._fp)
         indebug(self.ui, 'start extraction of bundle2 parts')
         headerblock = self._readpartheader()
         while headerblock is not None:
@@ -690,10 +779,31 @@
         return None
 
     def compressed(self):
-        return False
+        self.params # load params
+        return self._compressed
 
 formatmap = {'20': unbundle20}
 
+b2streamparamsmap = {}
+
+def b2streamparamhandler(name):
+    """register a handler for a stream level parameter"""
+    def decorator(func):
+        assert name not in b2streamparamsmap
+        b2streamparamsmap[name] = func
+        return func
+    return decorator
+
+@b2streamparamhandler('compression')
+def processcompression(unbundler, param, value):
+    """read compression parameter and install payload decompression"""
+    if value not in util.decompressors:
+        raise error.BundleUnknownFeatureError(params=(param,),
+                                              values=(value,))
+    unbundler._decompressor = util.decompressors[value]
+    if value is not None:
+        unbundler._compressed = True
+
 class bundlepart(object):
     """A bundle2 part contains application level payload
 
@@ -841,6 +951,12 @@
                 outdebug(ui, 'payload chunk size: %i' % len(chunk))
                 yield _pack(_fpayloadsize, len(chunk))
                 yield chunk
+        except GeneratorExit:
+            # GeneratorExit means that nobody is listening for our
+            # results anyway, so just bail quickly rather than trying
+            # to produce an error part.
+            ui.debug('bundle2-generatorexit\n')
+            raise
         except BaseException as exc:
             # backup exception data for later
             ui.debug('bundle2-input-stream-interrupt: encoding exception %s'
@@ -1103,7 +1219,7 @@
             self._payloadstream = util.chunkbuffer(self._payloadchunks(chunk))
             adjust = self.read(internaloffset)
             if len(adjust) != internaloffset:
-                raise util.Abort(_('Seek failed\n'))
+                raise error.Abort(_('seek failed'))
             self._pos = newpos
 
 # These are only the static capabilities.
@@ -1162,14 +1278,13 @@
     unpackerversion = inpart.params.get('version', '01')
     # We should raise an appropriate exception here
     unpacker = changegroup.packermap[unpackerversion][1]
-    cg = unpacker(inpart, 'UN')
+    cg = unpacker(inpart, None)
     # the source and url passed here are overwritten by the one contained in
     # the transaction.hookargs argument. So 'bundle2' is a placeholder
     nbchangesets = None
     if 'nbchanges' in inpart.params:
         nbchangesets = int(inpart.params.get('nbchanges'))
-    ret = changegroup.addchangegroup(op.repo, cg, 'bundle2', 'bundle2',
-                                     expectedtotal=nbchangesets)
+    ret = cg.apply(op.repo, 'bundle2', 'bundle2', expectedtotal=nbchangesets)
     op.records.add('changegroup', {'return': ret})
     if op.reply is not None:
         # This is definitely not the final form of this
@@ -1201,19 +1316,19 @@
     try:
         raw_url = inpart.params['url']
     except KeyError:
-        raise util.Abort(_('remote-changegroup: missing "%s" param') % 'url')
+        raise error.Abort(_('remote-changegroup: missing "%s" param') % 'url')
     parsed_url = util.url(raw_url)
     if parsed_url.scheme not in capabilities['remote-changegroup']:
-        raise util.Abort(_('remote-changegroup does not support %s urls') %
+        raise error.Abort(_('remote-changegroup does not support %s urls') %
             parsed_url.scheme)
 
     try:
         size = int(inpart.params['size'])
     except ValueError:
-        raise util.Abort(_('remote-changegroup: invalid value for param "%s"')
+        raise error.Abort(_('remote-changegroup: invalid value for param "%s"')
             % 'size')
     except KeyError:
-        raise util.Abort(_('remote-changegroup: missing "%s" param') % 'size')
+        raise error.Abort(_('remote-changegroup: missing "%s" param') % 'size')
 
     digests = {}
     for typ in inpart.params.get('digests', '').split():
@@ -1221,7 +1336,7 @@
         try:
             value = inpart.params[param]
         except KeyError:
-            raise util.Abort(_('remote-changegroup: missing "%s" param') %
+            raise error.Abort(_('remote-changegroup: missing "%s" param') %
                 param)
         digests[typ] = value
 
@@ -1233,12 +1348,12 @@
     # we need to make sure we trigger the creation of a transaction object used
     # for the whole processing scope.
     op.gettransaction()
-    import exchange
+    from . import exchange
     cg = exchange.readbundle(op.repo.ui, real_part, raw_url)
     if not isinstance(cg, changegroup.cg1unpacker):
-        raise util.Abort(_('%s: not a bundle version 1.0') %
+        raise error.Abort(_('%s: not a bundle version 1.0') %
             util.hidepassword(raw_url))
-    ret = changegroup.addchangegroup(op.repo, cg, 'bundle2', 'bundle2')
+    ret = cg.apply(op.repo, 'bundle2', 'bundle2')
     op.records.add('changegroup', {'return': ret})
     if op.reply is not None:
         # This is definitely not the final form of this
@@ -1248,8 +1363,8 @@
         part.addparam('return', '%i' % ret, mandatory=False)
     try:
         real_part.validate()
-    except util.Abort as e:
-        raise util.Abort(_('bundle at %s is corrupted:\n%s') %
+    except error.Abort as e:
+        raise error.Abort(_('bundle at %s is corrupted:\n%s') %
             (util.hidepassword(raw_url), str(e)))
     assert not inpart.read()
 
@@ -1271,6 +1386,9 @@
         heads.append(h)
         h = inpart.read(20)
     assert not h
+    # Trigger a transaction so that we are guaranteed to have the lock now.
+    if op.ui.configbool('experimental', 'bundle2lazylocking'):
+        op.gettransaction()
     if heads != op.repo.heads():
         raise error.PushRaced('repository changed while pushing - '
                               'please try again')
@@ -1293,7 +1411,7 @@
 @parthandler('error:abort', ('message', 'hint'))
 def handleerrorabort(op, inpart):
     """Used to transmit abort error over the wire"""
-    raise util.Abort(inpart.params['message'], hint=inpart.params.get('hint'))
+    raise error.Abort(inpart.params['message'], hint=inpart.params.get('hint'))
 
 @parthandler('error:pushkey', ('namespace', 'key', 'new', 'old', 'ret',
                                'in-reply-to'))
@@ -1317,7 +1435,7 @@
     if params is not None:
         kwargs['params'] = params.split('\0')
 
-    raise error.UnsupportedPartError(**kwargs)
+    raise error.BundleUnknownFeatureError(**kwargs)
 
 @parthandler('error:pushraced', ('message',))
 def handleerrorpushraced(op, inpart):
@@ -1339,6 +1457,10 @@
     key = dec(inpart.params['key'])
     old = dec(inpart.params['old'])
     new = dec(inpart.params['new'])
+    # Grab the transaction to ensure that we have the lock before performing the
+    # pushkey.
+    if op.ui.configbool('experimental', 'bundle2lazylocking'):
+        op.gettransaction()
     ret = op.repo.pushkey(namespace, key, old, new)
     record = {'namespace': namespace,
               'key': key,
@@ -1371,6 +1493,11 @@
     if op.ui.config('experimental', 'obsmarkers-exchange-debug', False):
         op.ui.write(('obsmarker-exchange: %i bytes received\n')
                     % len(markerdata))
+    # The mergemarkers call will crash if marker creation is not enabled.
+    # We want to avoid this if the part is advisory.
+    if not inpart.mandatory and op.repo.obsstore.readonly:
+        op.repo.ui.debug('ignoring obsolescence markers, feature not enabled\n')
+        return
     new = op.repo.obsstore.mergemarkers(tr, markerdata)
     if new:
         op.repo.ui.status(_('%i new obsolescence markers\n') % new)
@@ -1394,6 +1521,9 @@
 
     Payload is pairs of 20 byte changeset nodes and filenodes.
     """
+    # Grab the transaction so we ensure that we have the lock at this point.
+    if op.ui.configbool('experimental', 'bundle2lazylocking'):
+        op.gettransaction()
     cache = tags.hgtagsfnodescache(op.repo.unfiltered())
 
     count = 0
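
Stream-level compression is wired in symmetrically above: bundle20.setcompression() adds the 'Compression' parameter on the sending side, and the @b2streamparamhandler('compression') handler installs the matching decompressor on the receiving side. A hedged sketch of how a further stream-level parameter could be registered with the same decorator; the 'checksum' parameter and its behaviour are purely illustrative:

    # Hedged sketch, not code from this changeset; 'checksum' is made up.
    from mercurial import bundle2, error

    @bundle2.b2streamparamhandler('checksum')
    def processchecksum(unbundler, param, value):
        """Record a hypothetical stream checksum for later verification."""
        if value is None:
            raise error.BundleValueError('checksum parameter requires a value')
        unbundler.ui.debug('bundle2 stream checksum: %s\n' % value)
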
--- a/mercurial/bundlerepo.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/bundlerepo.py	Tue Oct 20 15:59:10 2015 -0500
@@ -11,12 +11,33 @@
 were part of the actual repository.
 """
 
-from node import nullid
-from i18n import _
-import os, tempfile, shutil
-import changegroup, util, mdiff, discovery, cmdutil, scmutil, exchange
-import localrepo, changelog, manifest, filelog, revlog, error, phases, bundle2
-import pathutil
+from __future__ import absolute_import
+
+import os
+import shutil
+import tempfile
+
+from .i18n import _
+from .node import nullid
+
+from . import (
+    bundle2,
+    changegroup,
+    changelog,
+    cmdutil,
+    discovery,
+    error,
+    exchange,
+    filelog,
+    localrepo,
+    manifest,
+    mdiff,
+    pathutil,
+    phases,
+    revlog,
+    scmutil,
+    util,
+)
 
 class bundlerevlog(revlog.revlog):
     def __init__(self, opener, indexfile, bundle, linkmapper):
@@ -174,7 +195,15 @@
                               linkmapper)
 
     def baserevision(self, nodeorrev):
-        return manifest.manifest.revision(self, nodeorrev)
+        node = nodeorrev
+        if isinstance(node, int):
+            node = self.node(node)
+
+        if node in self._mancache:
+            result = self._mancache[node][0].text()
+        else:
+            result = manifest.manifest.revision(self, nodeorrev)
+        return result
 
 class bundlefilelog(bundlerevlog, filelog.filelog):
     def __init__(self, opener, path, bundle, linkmapper):
@@ -208,6 +237,27 @@
 
 class bundlerepository(localrepo.localrepository):
     def __init__(self, ui, path, bundlename):
+        def _writetempbundle(read, suffix, header=''):
+            """Write a temporary file to disk
+
+            This is a closure because we need to make sure it is tracked by
+            self.tempfile for cleanup purposes."""
+            fdtemp, temp = self.vfs.mkstemp(prefix="hg-bundle-",
+                                            suffix=".hg10un")
+            self.tempfile = temp
+            fptemp = os.fdopen(fdtemp, 'wb')
+
+            try:
+                fptemp.write(header)
+                while True:
+                    chunk = read(2**18)
+                    if not chunk:
+                        break
+                    fptemp.write(chunk)
+            finally:
+                fptemp.close()
+
+            return self.vfs.open(self.tempfile, mode="rb")
         self._tempparent = None
         try:
             localrepo.localrepository.__init__(self, ui, path)
@@ -225,45 +275,35 @@
         self.tempfile = None
         f = util.posixfile(bundlename, "rb")
         self.bundlefile = self.bundle = exchange.readbundle(ui, f, bundlename)
-        if self.bundle.compressed():
-            fdtemp, temp = self.vfs.mkstemp(prefix="hg-bundle-",
-                                            suffix=".hg10un")
-            self.tempfile = temp
-            fptemp = os.fdopen(fdtemp, 'wb')
 
-            try:
-                fptemp.write("HG10UN")
-                while True:
-                    chunk = self.bundle.read(2**18)
-                    if not chunk:
-                        break
-                    fptemp.write(chunk)
-            finally:
-                fptemp.close()
+        if isinstance(self.bundle, bundle2.unbundle20):
+            cgstream = None
+            for part in self.bundle.iterparts():
+                if part.type == 'changegroup':
+                    if cgstream is not None:
+                        raise NotImplementedError("can't process "
+                                                  "multiple changegroups")
+                    cgstream = part
+                    version = part.params.get('version', '01')
+                    if version not in changegroup.packermap:
+                        msg = _('Unsupported changegroup version: %s')
+                        raise error.Abort(msg % version)
+                    if self.bundle.compressed():
+                        cgstream = _writetempbundle(part.read,
+                                                    ".cg%sun" % version)
 
-            f = self.vfs.open(self.tempfile, mode="rb")
+            if cgstream is None:
+                raise error.Abort(_('No changegroups found'))
+            cgstream.seek(0)
+
+            self.bundle = changegroup.packermap[version][1](cgstream, 'UN')
+
+        elif self.bundle.compressed():
+            f = _writetempbundle(self.bundle.read, '.hg10un', header='HG10UN')
             self.bundlefile = self.bundle = exchange.readbundle(ui, f,
                                                                 bundlename,
                                                                 self.vfs)
 
-        if isinstance(self.bundle, bundle2.unbundle20):
-            cgparts = [part for part in self.bundle.iterparts()
-                       if (part.type == 'changegroup')
-                       and (part.params.get('version', '01')
-                            in changegroup.packermap)]
-
-            if not cgparts:
-                raise util.Abort('No changegroups found')
-            version = cgparts[0].params.get('version', '01')
-            cgparts = [p for p in cgparts
-                       if p.params.get('version', '01') == version]
-            if len(cgparts) > 1:
-                raise NotImplementedError("Can't process multiple changegroups")
-            part = cgparts[0]
-
-            part.seek(0)
-            self.bundle = changegroup.packermap[version][1](part, 'UN')
-
         # dict with the mapping 'filename' -> position in the bundle
         self.bundlefilespos = {}
 
@@ -345,7 +385,7 @@
 
 def instance(ui, path, create):
     if create:
-        raise util.Abort(_('cannot create new bundle repository'))
+        raise error.Abort(_('cannot create new bundle repository'))
     # internal config: bundle.mainreporoot
     parentpath = ui.config("bundle", "mainreporoot", "")
     if not parentpath:
@@ -426,19 +466,33 @@
     if bundlename or not localrepo:
         # create a bundle (uncompressed if other repo is not local)
 
-        if other.capable('getbundle'):
-            cg = other.getbundle('incoming', common=common, heads=rheads)
-        elif onlyheads is None and not other.capable('changegroupsubset'):
-            # compat with older servers when pulling all remote heads
-            cg = other.changegroup(incoming, "incoming")
-            rheads = None
+        canbundle2 = (ui.configbool('experimental', 'bundle2-exp', True)
+                      and other.capable('getbundle')
+                      and other.capable('bundle2'))
+        if canbundle2:
+            kwargs = {}
+            kwargs['common'] = common
+            kwargs['heads'] = rheads
+            kwargs['bundlecaps'] = exchange.caps20to10(repo)
+            kwargs['cg'] = True
+            b2 = other.getbundle('incoming', **kwargs)
+            fname = bundle = changegroup.writechunks(ui, b2._forwardchunks(),
+                                                     bundlename)
         else:
-            cg = other.changegroupsubset(incoming, rheads, 'incoming')
-        if localrepo:
-            bundletype = "HG10BZ"
-        else:
-            bundletype = "HG10UN"
-        fname = bundle = changegroup.writebundle(ui, cg, bundlename, bundletype)
+            if other.capable('getbundle'):
+                cg = other.getbundle('incoming', common=common, heads=rheads)
+            elif onlyheads is None and not other.capable('changegroupsubset'):
+                # compat with older servers when pulling all remote heads
+                cg = other.changegroup(incoming, "incoming")
+                rheads = None
+            else:
+                cg = other.changegroupsubset(incoming, rheads, 'incoming')
+            if localrepo:
+                bundletype = "HG10BZ"
+            else:
+                bundletype = "HG10UN"
+            fname = bundle = changegroup.writebundle(ui, cg, bundlename,
+                                                     bundletype)
         # keep written bundle?
         if bundlename:
             bundle = None
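
bundlerepository can now be fed a bundle2 file directly: its single changegroup part is located via iterparts(), spooled through the _writetempbundle closure when compressed, and unpacked with the matching packermap entry. A hedged sketch of constructing such a repository from an on-disk bundle; the paths are illustrative and the constructor signature matches the __init__(ui, path, bundlename) shown above:

    # Hedged sketch, not code from this changeset; paths are illustrative.
    from mercurial import ui as uimod, bundlerepo

    def openbundle(parentrepopath, bundlepath):
        # bundlerepo.instance() ends up making the same call after parsing
        # a "parentrepo+bundle" style path.
        return bundlerepo.bundlerepository(uimod.ui(), parentrepopath, bundlepath)
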
--- a/mercurial/changegroup.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/changegroup.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,12 +5,30 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+from __future__ import absolute_import
+
+import os
+import struct
+import tempfile
 import weakref
-from i18n import _
-from node import nullrev, nullid, hex, short
-import mdiff, util, dagutil
-import struct, os, bz2, zlib, tempfile
-import discovery, error, phases, branchmap
+
+from .i18n import _
+from .node import (
+    hex,
+    nullid,
+    nullrev,
+    short,
+)
+
+from . import (
+    branchmap,
+    dagutil,
+    discovery,
+    error,
+    mdiff,
+    phases,
+    util,
+)
 
 _CHANGEGROUPV1_DELTA_HEADER = "20s20s20s20s"
 _CHANGEGROUPV2_DELTA_HEADER = "20s20s20s20s20s"
@@ -19,7 +37,7 @@
     '''read n bytes from stream.read and abort if less was available'''
     s = stream.read(n)
     if len(s) < n:
-        raise util.Abort(_("stream ended unexpectedly"
+        raise error.Abort(_("stream ended unexpectedly"
                            " (got %d bytes, expected %d)")
                           % (len(s), n))
     return s
@@ -30,7 +48,7 @@
     l = struct.unpack(">l", d)[0]
     if l <= 4:
         if l:
-            raise util.Abort(_("invalid chunk length %d") % l)
+            raise error.Abort(_("invalid chunk length %d") % l)
         return ""
     return readexactly(stream, l - 4)
 
@@ -61,34 +79,26 @@
         result = -1 + changedheads
     return result
 
-class nocompress(object):
-    def compress(self, x):
-        return x
-    def flush(self):
-        return ""
-
 bundletypes = {
-    "": ("", nocompress), # only when using unbundle on ssh and old http servers
+    "": ("", None),       # only when using unbundle on ssh and old http servers
                           # since the unification ssh accepts a header but there
                           # is no capability signaling it.
     "HG20": (), # special-cased below
-    "HG10UN": ("HG10UN", nocompress),
-    "HG10BZ": ("HG10", lambda: bz2.BZ2Compressor()),
-    "HG10GZ": ("HG10GZ", lambda: zlib.compressobj()),
+    "HG10UN": ("HG10UN", None),
+    "HG10BZ": ("HG10", 'BZ'),
+    "HG10GZ": ("HG10GZ", 'GZ'),
 }
 
 # hgweb uses this list to communicate its preferred type
 bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN']
 
-def writebundle(ui, cg, filename, bundletype, vfs=None):
-    """Write a bundle file and return its filename.
+def writechunks(ui, chunks, filename, vfs=None):
+    """Write chunks to a file and return its filename.
 
+    The stream is assumed to be a bundle file.
     Existing files will not be overwritten.
     If no filename is specified, a temporary file is created.
-    bz2 compression can be turned off.
-    The bundle file will be deleted in case of errors.
     """
-
     fh = None
     cleanup = None
     try:
@@ -101,32 +111,8 @@
             fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
             fh = os.fdopen(fd, "wb")
         cleanup = filename
-
-        if bundletype == "HG20":
-            import bundle2
-            bundle = bundle2.bundle20(ui)
-            part = bundle.newpart('changegroup', data=cg.getchunks())
-            part.addparam('version', cg.version)
-            z = nocompress()
-            chunkiter = bundle.getchunks()
-        else:
-            if cg.version != '01':
-                raise util.Abort(_('old bundle types only supports v1 '
-                                   'changegroups'))
-            header, compressor = bundletypes[bundletype]
-            fh.write(header)
-            z = compressor()
-            chunkiter = cg.getchunks()
-
-        # parse the changegroup data, otherwise we will block
-        # in case of sshrepo because we don't know the end of the stream
-
-        # an empty chunkgroup is the end of the changegroup
-        # a changegroup has at least 2 chunkgroups (changelog and manifest).
-        # after that, an empty chunkgroup is the end of the changegroup
-        for chunk in chunkiter:
-            fh.write(z.compress(chunk))
-        fh.write(z.flush())
+        for c in chunks:
+            fh.write(c)
         cleanup = None
         return filename
     finally:
@@ -138,34 +124,86 @@
             else:
                 os.unlink(cleanup)
 
-def decompressor(fh, alg):
-    if alg == 'UN':
-        return fh
-    elif alg == 'GZ':
-        def generator(f):
-            zd = zlib.decompressobj()
-            for chunk in util.filechunkiter(f):
-                yield zd.decompress(chunk)
-    elif alg == 'BZ':
-        def generator(f):
-            zd = bz2.BZ2Decompressor()
-            zd.decompress("BZ")
-            for chunk in util.filechunkiter(f, 4096):
-                yield zd.decompress(chunk)
+def writebundle(ui, cg, filename, bundletype, vfs=None, compression=None):
+    """Write a bundle file and return its filename.
+
+    Existing files will not be overwritten.
+    If no filename is specified, a temporary file is created.
+    bz2 compression can be turned off.
+    The bundle file will be deleted in case of errors.
+    """
+
+    if bundletype == "HG20":
+        from . import bundle2
+        bundle = bundle2.bundle20(ui)
+        bundle.setcompression(compression)
+        part = bundle.newpart('changegroup', data=cg.getchunks())
+        part.addparam('version', cg.version)
+        chunkiter = bundle.getchunks()
     else:
-        raise util.Abort("unknown bundle compression '%s'" % alg)
-    return util.chunkbuffer(generator(fh))
+        # compression argument is only for the bundle2 case
+        assert compression is None
+        if cg.version != '01':
+            raise error.Abort(_('old bundle types only support v1 '
+                                'changegroups'))
+        header, comp = bundletypes[bundletype]
+        if comp not in util.compressors:
+            raise error.Abort(_('unknown stream compression type: %s')
+                              % comp)
+        z = util.compressors[comp]()
+        subchunkiter = cg.getchunks()
+        def chunkiter():
+            yield header
+            for chunk in subchunkiter:
+                yield z.compress(chunk)
+            yield z.flush()
+        chunkiter = chunkiter()
+
+    # parse the changegroup data, otherwise we will block
+    # in case of sshrepo because we don't know the end of the stream
+
+    # an empty chunkgroup is the end of the changegroup
+    # a changegroup has at least 2 chunkgroups (changelog and manifest).
+    # after that, an empty chunkgroup is the end of the changegroup
+    return writechunks(ui, chunkiter, filename, vfs=vfs)
 
 class cg1unpacker(object):
+    """Unpacker for cg1 changegroup streams.
+
+    A changegroup unpacker handles the framing of the revision data in
+    the wire format. Most consumers will want to use the apply()
+    method to add the changes from the changegroup to a repository.
+
+    If you're forwarding a changegroup unmodified to another consumer,
+    use getchunks(), which returns an iterator of changegroup
+    chunks. This is mostly useful for cases where you need to know the
+    data stream has ended by observing the end of the changegroup.
+
+    deltachunk() is useful only if you're applying delta data. Most
+    consumers should prefer apply() instead.
+
+    A few other public methods exist. Those are used only for
+    bundlerepo and some debug commands - their use is discouraged.
+    """
     deltaheader = _CHANGEGROUPV1_DELTA_HEADER
     deltaheadersize = struct.calcsize(deltaheader)
     version = '01'
     def __init__(self, fh, alg):
-        self._stream = decompressor(fh, alg)
+        if alg == 'UN':
+            alg = None # get more modern without breaking too much
+        if alg not in util.decompressors:
+            raise error.Abort(_('unknown stream compression type: %s')
+                             % alg)
+        if alg == 'BZ':
+            alg = '_truncatedBZ'
+        self._stream = util.decompressors[alg](fh)
         self._type = alg
         self.callback = None
+
+    # These methods (compressed, read, seek, tell) all appear to only
+    # be used by bundlerepo, but it's a little hard to tell.
     def compressed(self):
-        return self._type != 'UN'
+        return self._type is not None
     def read(self, l):
         return self._stream.read(l)
     def seek(self, pos):
@@ -175,12 +213,12 @@
     def close(self):
         return self._stream.close()
 
-    def chunklength(self):
+    def _chunklength(self):
         d = readexactly(self._stream, 4)
         l = struct.unpack(">l", d)[0]
         if l <= 4:
             if l:
-                raise util.Abort(_("invalid chunk length %d") % l)
+                raise error.Abort(_("invalid chunk length %d") % l)
             return 0
         if self.callback:
             self.callback()
@@ -196,7 +234,7 @@
 
     def filelogheader(self):
         """return the header of the filelogs chunk, v10 only has the filename"""
-        l = self.chunklength()
+        l = self._chunklength()
         if not l:
             return {}
         fname = readexactly(self._stream, l)
@@ -211,7 +249,7 @@
         return node, p1, p2, deltabase, cs
 
     def deltachunk(self, prevnode):
-        l = self.chunklength()
+        l = self._chunklength()
         if not l:
             return {}
         headerdata = readexactly(self._stream, self.deltaheadersize)
@@ -249,7 +287,207 @@
                     pos = next
             yield closechunk()
 
+    def _unpackmanifests(self, repo, revmap, trp, prog, numchanges):
+        # We know that we'll never have more manifests than we had
+        # changesets.
+        self.callback = prog(_('manifests'), numchanges)
+        # no need to check for empty manifest group here:
+        # if the result of the merge of 1 and 2 is the same in 3 and 4,
+        # no new manifest will be created and the manifest group will
+        # be empty during the pull
+        self.manifestheader()
+        repo.manifest.addgroup(self, revmap, trp)
+        repo.ui.progress(_('manifests'), None)
+
+    def apply(self, repo, srctype, url, emptyok=False,
+              targetphase=phases.draft, expectedtotal=None):
+        """Add the changegroup returned by source.read() to this repo.
+        srctype is a string like 'push', 'pull', or 'unbundle'.  url is
+        the URL of the repo where this changegroup is coming from.
+
+        Return an integer summarizing the change to this repo:
+        - nothing changed or no source: 0
+        - more heads than before: 1+added heads (2..n)
+        - fewer heads than before: -1-removed heads (-2..-n)
+        - number of heads stays the same: 1
+        """
+        repo = repo.unfiltered()
+        def csmap(x):
+            repo.ui.debug("add changeset %s\n" % short(x))
+            return len(cl)
+
+        def revmap(x):
+            return cl.rev(x)
+
+        changesets = files = revisions = 0
+
+        tr = repo.transaction("\n".join([srctype, util.hidepassword(url)]))
+        # The transaction could have been created before and already
+        # carries source information. In this case we use the top
+        # level data. We overwrite the argument because we need to use
+        # the top level value (if they exist) in this function.
+        srctype = tr.hookargs.setdefault('source', srctype)
+        url = tr.hookargs.setdefault('url', url)
+
+        # write changelog data to temp files so concurrent readers will not see
+        # inconsistent view
+        cl = repo.changelog
+        cl.delayupdate(tr)
+        oldheads = cl.heads()
+        try:
+            repo.hook('prechangegroup', throw=True, **tr.hookargs)
+
+            trp = weakref.proxy(tr)
+            # pull off the changeset group
+            repo.ui.status(_("adding changesets\n"))
+            clstart = len(cl)
+            class prog(object):
+                def __init__(self, step, total):
+                    self._step = step
+                    self._total = total
+                    self._count = 1
+                def __call__(self):
+                    repo.ui.progress(self._step, self._count, unit=_('chunks'),
+                                     total=self._total)
+                    self._count += 1
+            self.callback = prog(_('changesets'), expectedtotal)
+
+            efiles = set()
+            def onchangelog(cl, node):
+                efiles.update(cl.read(node)[3])
+
+            self.changelogheader()
+            srccontent = cl.addgroup(self, csmap, trp,
+                                     addrevisioncb=onchangelog)
+            efiles = len(efiles)
+
+            if not (srccontent or emptyok):
+                raise error.Abort(_("received changelog group is empty"))
+            clend = len(cl)
+            changesets = clend - clstart
+            repo.ui.progress(_('changesets'), None)
+
+            # pull off the manifest group
+            repo.ui.status(_("adding manifests\n"))
+            self._unpackmanifests(repo, revmap, trp, prog, changesets)
+
+            needfiles = {}
+            if repo.ui.configbool('server', 'validate', default=False):
+                # validate incoming csets have their manifests
+                for cset in xrange(clstart, clend):
+                    mfnode = repo.changelog.read(repo.changelog.node(cset))[0]
+                    mfest = repo.manifest.readdelta(mfnode)
+                    # store file nodes we must see
+                    for f, n in mfest.iteritems():
+                        needfiles.setdefault(f, set()).add(n)
+
+            # process the files
+            repo.ui.status(_("adding file changes\n"))
+            self.callback = None
+            pr = prog(_('files'), efiles)
+            newrevs, newfiles = _addchangegroupfiles(
+                repo, self, revmap, trp, pr, needfiles)
+            revisions += newrevs
+            files += newfiles
+
+            dh = 0
+            if oldheads:
+                heads = cl.heads()
+                dh = len(heads) - len(oldheads)
+                for h in heads:
+                    if h not in oldheads and repo[h].closesbranch():
+                        dh -= 1
+            htext = ""
+            if dh:
+                htext = _(" (%+d heads)") % dh
+
+            repo.ui.status(_("added %d changesets"
+                             " with %d changes to %d files%s\n")
+                             % (changesets, revisions, files, htext))
+            repo.invalidatevolatilesets()
+
+            if changesets > 0:
+                if 'node' not in tr.hookargs:
+                    tr.hookargs['node'] = hex(cl.node(clstart))
+                    hookargs = dict(tr.hookargs)
+                else:
+                    hookargs = dict(tr.hookargs)
+                    hookargs['node'] = hex(cl.node(clstart))
+                repo.hook('pretxnchangegroup', throw=True, **hookargs)
+
+            added = [cl.node(r) for r in xrange(clstart, clend)]
+            publishing = repo.publishing()
+            if srctype in ('push', 'serve'):
+                # Old servers can not push the boundary themselves.
+                # New servers won't push the boundary if changeset already
+                # exists locally as secret
+                #
+                # We should not use added here but the list of all change in
+                # the bundle
+                if publishing:
+                    phases.advanceboundary(repo, tr, phases.public, srccontent)
+                else:
+                    # Those changesets have been pushed from the outside, their
+                    # phases are going to be pushed alongside. Therefore,
+                    # `targetphase` is ignored.
+                    phases.advanceboundary(repo, tr, phases.draft, srccontent)
+                    phases.retractboundary(repo, tr, phases.draft, added)
+            elif srctype != 'strip':
+                # publishing only alter behavior during push
+                #
+                # strip should not touch boundary at all
+                phases.retractboundary(repo, tr, targetphase, added)
+
+            if changesets > 0:
+                if srctype != 'strip':
+                    # During strip, branchcache is invalid but coming call to
+                    # `destroyed` will repair it.
+                    # In other case we can safely update cache on disk.
+                    branchmap.updatecache(repo.filtered('served'))
+
+                def runhooks():
+                    # These hooks run when the lock releases, not when the
+                    # transaction closes. So it's possible for the changelog
+                    # to have changed since we last saw it.
+                    if clstart >= len(repo):
+                        return
+
+                    # forcefully update the on-disk branch cache
+                    repo.ui.debug("updating the branch cache\n")
+                    repo.hook("changegroup", **hookargs)
+
+                    for n in added:
+                        args = hookargs.copy()
+                        args['node'] = hex(n)
+                        repo.hook("incoming", **args)
+
+                    newheads = [h for h in repo.heads() if h not in oldheads]
+                    repo.ui.log("incoming",
+                                "%s incoming changes - new heads: %s\n",
+                                len(added),
+                                ', '.join([hex(c[:6]) for c in newheads]))
+
+                tr.addpostclose('changegroup-runhooks-%020i' % clstart,
+                                lambda tr: repo._afterlock(runhooks))
+
+            tr.close()
+
+        finally:
+            tr.release()
+            repo.ui.flush()
+        # never return 0 here:
+        if dh < 0:
+            return dh - 1
+        else:
+            return dh + 1
+
 class cg2unpacker(cg1unpacker):
+    """Unpacker for cg2 streams.
+
+    cg2 streams add support for generaldelta, so the delta header
+    format is slightly different. All other features about the data
+    remain the same.
+    """
     deltaheader = _CHANGEGROUPV2_DELTA_HEADER
     deltaheadersize = struct.calcsize(deltaheader)
     version = '02'
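
packermap (further down in this file) maps each changegroup version to its (packer, unpacker) pair, and the unpacker's new apply() method replaces the module-level addchangegroup() removed at the end of this diff. A hedged sketch of reading an uncompressed changegroup stream and applying it; the source tag and URL strings are illustrative, and the repo lock is taken explicitly because apply() opens its own transaction:

    # Hedged sketch, not code from this changeset.
    from mercurial import changegroup

    def applyrawchangegroup(repo, fh, version='02'):
        unpacker = changegroup.packermap[version][1]
        cg = unpacker(fh, None)          # None: the stream is not compressed
        lock = repo.lock()
        try:
            return cg.apply(repo, 'unbundle', 'bundle:illustrative.cg')
        finally:
            lock.release()
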
@@ -355,6 +593,16 @@
         rr, rl = revlog.rev, revlog.linkrev
         return [n for n in missing if rl(rr(n)) not in commonrevs]
 
+    def _packmanifests(self, mfnodes, lookuplinknode):
+        """Pack flat manifests into a changegroup stream."""
+        ml = self._repo.manifest
+        size = 0
+        for chunk in self.group(
+                mfnodes, ml, lookuplinknode, units=_('manifests')):
+            size += len(chunk)
+            yield chunk
+        self._verbosenote(_('%8.i (manifests)\n') % size)
+
     def generate(self, commonrevs, clnodes, fastpathlinkrev, source):
         '''yield a sequence of changegroup chunks (strings)'''
         repo = self._repo
@@ -403,7 +651,7 @@
         # Callback for the manifest, used to collect linkrevs for filelog
         # revisions.
         # Returns the linkrev node (collected in lookupcl).
-        def lookupmf(x):
+        def lookupmflinknode(x):
             clnode = mfs[x]
             if not fastpathlinkrev:
                 mdata = ml.readfast(x)
@@ -418,11 +666,8 @@
             return clnode
 
         mfnodes = self.prune(ml, mfs, commonrevs)
-        size = 0
-        for chunk in self.group(mfnodes, ml, lookupmf, units=_('manifests')):
-            size += len(chunk)
-            yield chunk
-        self._verbosenote(_('%8.i (manifests)\n') % size)
+        for x in self._packmanifests(mfnodes, lookupmflinknode):
+            yield x
 
         mfs.clear()
         clrevs = set(cl.rev(x) for x in clnodes)
@@ -459,7 +704,7 @@
         for i, fname in enumerate(sorted(changedfiles)):
             filerevlog = repo.file(fname)
             if not filerevlog:
-                raise util.Abort(_("empty or missing revlog for %s") % fname)
+                raise error.Abort(_("empty or missing revlog for %s") % fname)
 
             linkrevnodes = linknodes(filerevlog, fname)
             # Lookup for filenodes, we collected the linkrev nodes above in the
@@ -540,7 +785,9 @@
         return struct.pack(self.deltaheader, node, p1n, p2n, basenode, linknode)
 
 packermap = {'01': (cg1packer, cg1unpacker),
-             '02': (cg2packer, cg2unpacker)}
+             # cg2 adds support for exchanging generaldelta
+             '02': (cg2packer, cg2unpacker),
+}
 
 def _changegroupinfo(repo, nodes, source):
     if repo.ui.verbose or source == 'bundle':
@@ -566,9 +813,9 @@
     _changegroupinfo(repo, csets, source)
     return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
 
-def getsubset(repo, outgoing, bundler, source, fastpath=False, version='01'):
+def getsubset(repo, outgoing, bundler, source, fastpath=False):
     gengroup = getsubsetraw(repo, outgoing, bundler, source, fastpath)
-    return packermap[version][1](util.chunkbuffer(gengroup), 'UN')
+    return packermap[bundler.version][1](util.chunkbuffer(gengroup), None)
 
 def changegroupsubset(repo, roots, heads, source, version='01'):
     """Compute a changegroup consisting of all the nodes that are
@@ -595,7 +842,7 @@
     discbases = [n for n in discbases if n not in included]
     outgoing = discovery.outgoing(cl, discbases, heads)
     bundler = packermap[version][0](repo)
-    return getsubset(repo, outgoing, bundler, source, version=version)
+    return getsubset(repo, outgoing, bundler, source)
 
 def getlocalchangegroupraw(repo, source, outgoing, bundlecaps=None,
                            version='01'):
@@ -608,14 +855,15 @@
     bundler = packermap[version][0](repo, bundlecaps)
     return getsubsetraw(repo, outgoing, bundler, source)
 
-def getlocalchangegroup(repo, source, outgoing, bundlecaps=None):
+def getlocalchangegroup(repo, source, outgoing, bundlecaps=None,
+                        version='01'):
     """Like getbundle, but taking a discovery.outgoing as an argument.
 
     This is only implemented for local repos and reuses potentially
     precomputed sets in outgoing."""
     if not outgoing.missing:
         return None
-    bundler = cg1packer(repo, bundlecaps)
+    bundler = packermap[version][0](repo, bundlecaps)
     return getsubset(repo, outgoing, bundler, source)
 
 def computeoutgoing(repo, heads, common):
@@ -637,7 +885,8 @@
         heads = cl.heads()
     return discovery.outgoing(cl, common, heads)
 
-def getchangegroup(repo, source, heads=None, common=None, bundlecaps=None):
+def getchangegroup(repo, source, heads=None, common=None, bundlecaps=None,
+                   version='01'):
     """Like changegroupsubset, but returns the set difference between the
     ancestors of heads and the ancestors common.
 
@@ -647,13 +896,14 @@
     current discovery protocol works.
     """
     outgoing = computeoutgoing(repo, heads, common)
-    return getlocalchangegroup(repo, source, outgoing, bundlecaps=bundlecaps)
+    return getlocalchangegroup(repo, source, outgoing, bundlecaps=bundlecaps,
+                               version=version)
 
 def changegroup(repo, basenodes, source):
     # to avoid a race we use changegroupsubset() (issue1320)
     return changegroupsubset(repo, basenodes, repo.heads(), source)
 
-def addchangegroupfiles(repo, source, revmap, trp, pr, needfiles):
+def _addchangegroupfiles(repo, source, revmap, trp, pr, needfiles):
     revisions = 0
     files = 0
     while True:
@@ -667,9 +917,9 @@
         o = len(fl)
         try:
             if not fl.addgroup(source, revmap, trp):
-                raise util.Abort(_("received file revlog group is empty"))
+                raise error.Abort(_("received file revlog group is empty"))
         except error.CensoredBaseError as e:
-            raise util.Abort(_("received delta base is censored: %s") % e)
+            raise error.Abort(_("received delta base is censored: %s") % e)
         revisions += len(fl) - o
         files += 1
         if f in needfiles:
@@ -679,7 +929,7 @@
                 if n in needs:
                     needs.remove(n)
                 else:
-                    raise util.Abort(
+                    raise error.Abort(
                         _("received spurious file revlog entry"))
             if not needs:
                 del needfiles[f]
@@ -691,202 +941,8 @@
             try:
                 fl.rev(n)
             except error.LookupError:
-                raise util.Abort(
+                raise error.Abort(
                     _('missing file data for %s:%s - run hg verify') %
                     (f, hex(n)))
 
     return revisions, files
-
-def addchangegroup(repo, source, srctype, url, emptyok=False,
-                   targetphase=phases.draft, expectedtotal=None):
-    """Add the changegroup returned by source.read() to this repo.
-    srctype is a string like 'push', 'pull', or 'unbundle'.  url is
-    the URL of the repo where this changegroup is coming from.
-
-    Return an integer summarizing the change to this repo:
-    - nothing changed or no source: 0
-    - more heads than before: 1+added heads (2..n)
-    - fewer heads than before: -1-removed heads (-2..-n)
-    - number of heads stays the same: 1
-    """
-    repo = repo.unfiltered()
-    def csmap(x):
-        repo.ui.debug("add changeset %s\n" % short(x))
-        return len(cl)
-
-    def revmap(x):
-        return cl.rev(x)
-
-    if not source:
-        return 0
-
-    changesets = files = revisions = 0
-
-    tr = repo.transaction("\n".join([srctype, util.hidepassword(url)]))
-    # The transaction could have been created before and already carries source
-    # information. In this case we use the top level data. We overwrite the
-    # argument because we need to use the top level value (if they exist) in
-    # this function.
-    srctype = tr.hookargs.setdefault('source', srctype)
-    url = tr.hookargs.setdefault('url', url)
-
-    # write changelog data to temp files so concurrent readers will not see
-    # inconsistent view
-    cl = repo.changelog
-    cl.delayupdate(tr)
-    oldheads = cl.heads()
-    try:
-        repo.hook('prechangegroup', throw=True, **tr.hookargs)
-
-        trp = weakref.proxy(tr)
-        # pull off the changeset group
-        repo.ui.status(_("adding changesets\n"))
-        clstart = len(cl)
-        class prog(object):
-            def __init__(self, step, total):
-                self._step = step
-                self._total = total
-                self._count = 1
-            def __call__(self):
-                repo.ui.progress(self._step, self._count, unit=_('chunks'),
-                                 total=self._total)
-                self._count += 1
-        source.callback = prog(_('changesets'), expectedtotal)
-
-        efiles = set()
-        def onchangelog(cl, node):
-            efiles.update(cl.read(node)[3])
-
-        source.changelogheader()
-        srccontent = cl.addgroup(source, csmap, trp,
-                                 addrevisioncb=onchangelog)
-        efiles = len(efiles)
-
-        if not (srccontent or emptyok):
-            raise util.Abort(_("received changelog group is empty"))
-        clend = len(cl)
-        changesets = clend - clstart
-        repo.ui.progress(_('changesets'), None)
-
-        # pull off the manifest group
-        repo.ui.status(_("adding manifests\n"))
-        # manifests <= changesets
-        source.callback = prog(_('manifests'), changesets)
-        # no need to check for empty manifest group here:
-        # if the result of the merge of 1 and 2 is the same in 3 and 4,
-        # no new manifest will be created and the manifest group will
-        # be empty during the pull
-        source.manifestheader()
-        repo.manifest.addgroup(source, revmap, trp)
-        repo.ui.progress(_('manifests'), None)
-
-        needfiles = {}
-        if repo.ui.configbool('server', 'validate', default=False):
-            # validate incoming csets have their manifests
-            for cset in xrange(clstart, clend):
-                mfnode = repo.changelog.read(repo.changelog.node(cset))[0]
-                mfest = repo.manifest.readdelta(mfnode)
-                # store file nodes we must see
-                for f, n in mfest.iteritems():
-                    needfiles.setdefault(f, set()).add(n)
-
-        # process the files
-        repo.ui.status(_("adding file changes\n"))
-        source.callback = None
-        pr = prog(_('files'), efiles)
-        newrevs, newfiles = addchangegroupfiles(repo, source, revmap, trp, pr,
-                                                needfiles)
-        revisions += newrevs
-        files += newfiles
-
-        dh = 0
-        if oldheads:
-            heads = cl.heads()
-            dh = len(heads) - len(oldheads)
-            for h in heads:
-                if h not in oldheads and repo[h].closesbranch():
-                    dh -= 1
-        htext = ""
-        if dh:
-            htext = _(" (%+d heads)") % dh
-
-        repo.ui.status(_("added %d changesets"
-                         " with %d changes to %d files%s\n")
-                         % (changesets, revisions, files, htext))
-        repo.invalidatevolatilesets()
-
-        if changesets > 0:
-            p = lambda: tr.writepending() and repo.root or ""
-            if 'node' not in tr.hookargs:
-                tr.hookargs['node'] = hex(cl.node(clstart))
-                hookargs = dict(tr.hookargs)
-            else:
-                hookargs = dict(tr.hookargs)
-                hookargs['node'] = hex(cl.node(clstart))
-            repo.hook('pretxnchangegroup', throw=True, pending=p, **hookargs)
-
-        added = [cl.node(r) for r in xrange(clstart, clend)]
-        publishing = repo.publishing()
-        if srctype in ('push', 'serve'):
-            # Old servers can not push the boundary themselves.
-            # New servers won't push the boundary if changeset already
-            # exists locally as secret
-            #
-            # We should not use added here but the list of all change in
-            # the bundle
-            if publishing:
-                phases.advanceboundary(repo, tr, phases.public, srccontent)
-            else:
-                # Those changesets have been pushed from the outside, their
-                # phases are going to be pushed alongside. Therefor
-                # `targetphase` is ignored.
-                phases.advanceboundary(repo, tr, phases.draft, srccontent)
-                phases.retractboundary(repo, tr, phases.draft, added)
-        elif srctype != 'strip':
-            # publishing only alter behavior during push
-            #
-            # strip should not touch boundary at all
-            phases.retractboundary(repo, tr, targetphase, added)
-
-        if changesets > 0:
-            if srctype != 'strip':
-                # During strip, branchcache is invalid but coming call to
-                # `destroyed` will repair it.
-                # In other case we can safely update cache on disk.
-                branchmap.updatecache(repo.filtered('served'))
-
-            def runhooks():
-                # These hooks run when the lock releases, not when the
-                # transaction closes. So it's possible for the changelog
-                # to have changed since we last saw it.
-                if clstart >= len(repo):
-                    return
-
-                # forcefully update the on-disk branch cache
-                repo.ui.debug("updating the branch cache\n")
-                repo.hook("changegroup", **hookargs)
-
-                for n in added:
-                    args = hookargs.copy()
-                    args['node'] = hex(n)
-                    repo.hook("incoming", **args)
-
-                newheads = [h for h in repo.heads() if h not in oldheads]
-                repo.ui.log("incoming",
-                            "%s incoming changes - new heads: %s\n",
-                            len(added),
-                            ', '.join([hex(c[:6]) for c in newheads]))
-
-            tr.addpostclose('changegroup-runhooks-%020i' % clstart,
-                            lambda tr: repo._afterlock(runhooks))
-
-        tr.close()
-
-    finally:
-        tr.release()
-        repo.ui.flush()
-    # never return 0 here:
-    if dh < 0:
-        return dh - 1
-    else:
-        return dh + 1
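
The changegroup helpers above now thread an explicit changegroup version down to
getlocalchangegroup, and the bundle writer picks the on-disk container from it.
A minimal sketch of the resulting calling convention, modeled on the 'hg bundle'
hunks in commands.py further down (repo, ui and the output file name are assumed
from the surrounding command; this is an illustration, not the command's code)::

    from mercurial import changegroup

    # bundle every changeset in the repo as a version '02' changegroup
    cg = changegroup.getchangegroup(repo, 'bundle', heads=None, common=None,
                                    bundlecaps=None, version='02')
    if cg:
        # '02' changegroups travel in a bundle2 ('HG20') container
        changegroup.writebundle(ui, cg, 'example.hg', 'HG20', compression=None)
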
--- a/mercurial/changelog.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/changelog.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,9 +5,21 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from node import bin, hex, nullid
-from i18n import _
-import util, error, revlog, encoding
+from __future__ import absolute_import
+
+from .i18n import _
+from .node import (
+    bin,
+    hex,
+    nullid,
+)
+
+from . import (
+    encoding,
+    error,
+    revlog,
+    util,
+)
 
 _defaultextra = {'branch': 'default'}
 
@@ -172,6 +184,9 @@
         self.rev(self.node(0))
         return self._nodecache
 
+    def reachableroots(self, minroot, heads, roots, includepath=False):
+        return self.index.reachableroots2(minroot, heads, roots, includepath)
+
     def headrevs(self):
         if self.filteredrevs:
             try:
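
reachableroots() above hands the computation to the C index (reachableroots2).
A rough pure-Python sketch of the includepath=False behavior, patterned after
the pure fallback kept in revset.py; illustration only, assuming cl is a
changelog-like object with parentrevs() and minroot is a non-negative revision::

    def reachableroots_sketch(cl, minroot, heads, roots):
        # walk from heads toward ancestors, never below minroot, collecting
        # members of roots; traversal stops at a root, so only the topmost
        # reachable roots are returned (includepath=False flavor)
        roots = set(roots)
        reachable = set()
        seen = set()
        visit = list(heads)
        while visit:
            rev = visit.pop()
            if rev in roots:
                reachable.add(rev)
                continue
            if rev in seen:
                continue
            seen.add(rev)
            for p in cl.parentrevs(rev):
                if p >= minroot and p not in seen:
                    visit.append(p)
        return reachable
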
--- a/mercurial/cmdutil.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/cmdutil.py	Tue Oct 20 15:59:10 2015 -0500
@@ -10,7 +10,7 @@
 import os, sys, errno, re, tempfile, cStringIO, shutil
 import util, scmutil, templater, patch, error, templatekw, revlog, copies
 import match as matchmod
-import context, repair, graphmod, revset, phases, obsolete, pathutil
+import repair, graphmod, revset, phases, obsolete, pathutil
 import changelog
 import bookmarks
 import encoding
@@ -63,7 +63,7 @@
     """ Prompts the user to filter the originalhunks and return a list of
     selected hunks.
     *operation* is used for ui purposes to indicate to the user
-    what kind of filtering they are doing: reverting, commiting, shelving, etc.
+    what kind of filtering they are doing: reverting, committing, shelving, etc.
     *operation* has to be a translated string.
     """
     usecurses =  ui.configbool('experimental', 'crecord', False)
@@ -85,7 +85,7 @@
             msg = _('running non-interactively, use %s instead') % cmdsuggest
         else:
             msg = _('running non-interactively')
-        raise util.Abort(msg)
+        raise error.Abort(msg)
 
     # make sure username is set before going interactive
     if not opts.get('user'):
@@ -109,7 +109,7 @@
         checkunfinished(repo, commit=True)
         merge = len(repo[None].parents()) > 1
         if merge:
-            raise util.Abort(_('cannot partially commit a merge '
+            raise error.Abort(_('cannot partially commit a merge '
                                '(use "hg commit" instead)'))
 
         status = repo.status(match=match)
@@ -123,7 +123,7 @@
         try:
             chunks = filterfn(ui, originalchunks)
         except patch.PatchError as err:
-            raise util.Abort(_('error parsing patch: %s') % err)
+            raise error.Abort(_('error parsing patch: %s') % err)
 
         # We need to keep a backup of files that have been newly added and
         # modified during the recording process because there is a previous
@@ -193,7 +193,7 @@
                     ui.debug(fp.getvalue())
                     patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                 except patch.PatchError as err:
-                    raise util.Abort(str(err))
+                    raise error.Abort(str(err))
             del fp
 
             # 4. We prepared working directory according to filtered
@@ -305,10 +305,10 @@
 
 def bailifchanged(repo, merge=True):
     if merge and repo.dirstate.p2() != nullid:
-        raise util.Abort(_('outstanding uncommitted merge'))
+        raise error.Abort(_('outstanding uncommitted merge'))
     modified, added, removed, deleted = repo.status()[:4]
     if modified or added or removed or deleted:
-        raise util.Abort(_('uncommitted changes'))
+        raise error.Abort(_('uncommitted changes'))
     ctx = repo[None]
     for s in sorted(ctx.substate):
         ctx.sub(s).bailifchanged()
@@ -319,7 +319,7 @@
     logfile = opts.get('logfile')
 
     if message and logfile:
-        raise util.Abort(_('options --message and --logfile are mutually '
+        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
     if not message and logfile:
         try:
@@ -328,7 +328,7 @@
             else:
                 message = '\n'.join(util.readfile(logfile).splitlines())
         except IOError as inst:
-            raise util.Abort(_("can't read commit message '%s': %s") %
+            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, inst.strerror))
     return message
 
@@ -387,9 +387,9 @@
         try:
             limit = int(limit)
         except ValueError:
-            raise util.Abort(_('limit must be a positive integer'))
+            raise error.Abort(_('limit must be a positive integer'))
         if limit <= 0:
-            raise util.Abort(_('limit must be positive'))
+            raise error.Abort(_('limit must be positive'))
     else:
         limit = None
     return limit
@@ -437,7 +437,7 @@
             i += 1
         return ''.join(newname)
     except KeyError as inst:
-        raise util.Abort(_("invalid format spec '%%%s' in output filename") %
+        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
 
 def makefileobj(repo, pat, node=None, desc=None, total=None,
@@ -495,7 +495,7 @@
             msg = _('cannot specify --changelog or --manifest or --dir '
                     'without a repository')
     if msg:
-        raise util.Abort(msg)
+        raise error.Abort(msg)
 
     r = None
     if repo:
@@ -503,7 +503,7 @@
             r = repo.unfiltered().changelog
         elif dir:
             if 'treemanifest' not in repo.requirements:
-                raise util.Abort(_("--dir can only be used on repos with "
+                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
             dirlog = repo.dirlog(file_)
             if len(dirlog):
@@ -518,7 +518,7 @@
         if not file_:
             raise error.CommandError(cmd, _('invalid arguments'))
         if not os.path.isfile(file_):
-            raise util.Abort(_("revlog '%s' not found") % file_)
+            raise error.Abort(_("revlog '%s' not found") % file_)
         r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
                           file_[:-2] + ".i")
     return r
@@ -716,17 +716,17 @@
 
     pats = scmutil.expandpats(pats)
     if not pats:
-        raise util.Abort(_('no source or destination specified'))
+        raise error.Abort(_('no source or destination specified'))
     if len(pats) == 1:
-        raise util.Abort(_('no destination specified'))
+        raise error.Abort(_('no destination specified'))
     dest = pats.pop()
     destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
     if not destdirexists:
         if len(pats) > 1 or matchmod.patkind(pats[0]):
-            raise util.Abort(_('with multiple sources, destination must be an '
+            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
         if util.endswithsep(dest):
-            raise util.Abort(_('destination %s is not a directory') % dest)
+            raise error.Abort(_('destination %s is not a directory') % dest)
 
     tfn = targetpathfn
     if after:
@@ -738,7 +738,7 @@
             continue
         copylist.append((tfn(pat, dest, srcs), srcs))
     if not copylist:
-        raise util.Abort(_('no files to copy'))
+        raise error.Abort(_('no files to copy'))
 
     errors = 0
     for targetpath, srcs in copylist:
@@ -786,7 +786,7 @@
                 return not os.path.exists(lockpath)
             pid = util.rundetached(runargs, condfn)
             if pid < 0:
-                raise util.Abort(_('child process failed to start'))
+                raise error.Abort(_('child process failed to start'))
             writepid(pid)
         finally:
             try:
@@ -831,6 +831,27 @@
     if runfn:
         return runfn()
 
+## facility to let extensions process additional data into an import patch
+# list of identifiers to be executed in order
+extrapreimport = []  # run before commit
+extrapostimport = [] # run after commit
+# mapping from identifier to actual import function
+#
+# 'preimport' functions are run before the commit is made and are provided the
+# following arguments:
+# - repo: the localrepository instance,
+# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
+# - extra: the future extra dictionary of the changeset, please mutate it,
+# - opts: the import options.
+# XXX ideally, we would just pass a ctx ready to be computed; that would allow
+# mutation of the in-memory commit and more. Feel free to rework the code to get
+# there.
+extrapreimportmap = {}
+# 'postimport' functions are run after the commit is made and are provided the
+# following argument:
+# - ctx: the changectx created by import.
+extrapostimportmap = {}
+
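
The lists and dictionaries above form a small registration protocol; the call
sites appear in tryimportone() below. A minimal sketch of how a hypothetical
extension could plug into it (the 'exampleext' identifier and the extra key are
invented for illustration)::

    from mercurial import cmdutil

    _imported = []

    def _recordsource(repo, patchdata, extra, opts):
        # runs before the commit; patchdata is the dict returned by
        # patch.extract(), extra is the changeset's future extra dict
        nodeid = patchdata.get('nodeid')
        if nodeid:
            extra['exampleext_source'] = nodeid

    def _remember(ctx):
        # runs after the commit; ctx is the newly created changectx
        _imported.append(ctx.hex())

    cmdutil.extrapreimport.append('exampleext')
    cmdutil.extrapreimportmap['exampleext'] = _recordsource
    cmdutil.extrapostimport.append('exampleext')
    cmdutil.extrapostimportmap['exampleext'] = _remember
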
 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
     """Utility function used by commands.import to import a single patch
 
@@ -848,8 +869,17 @@
     :updatefunc: a function that updates a repo to a given node
                  updatefunc(<repo>, <node>)
     """
-    tmpname, message, user, date, branch, nodeid, p1, p2 = \
-        patch.extract(ui, hunk)
+    # avoid cycle context -> subrepo -> cmdutil
+    import context
+    extractdata = patch.extract(ui, hunk)
+    tmpname = extractdata.get('filename')
+    message = extractdata.get('message')
+    user = extractdata.get('user')
+    date = extractdata.get('date')
+    branch = extractdata.get('branch')
+    nodeid = extractdata.get('nodeid')
+    p1 = extractdata.get('p1')
+    p2 = extractdata.get('p2')
 
     update = not opts.get('bypass')
     strip = opts["strip"]
@@ -860,7 +890,6 @@
     msg = _('applied to working directory')
 
     rejects = False
-    dsguard = None
 
     try:
         cmdline_message = logmessage(ui, opts)
@@ -879,7 +908,7 @@
             parents.append(repo[nullid])
         if opts.get('exact'):
             if not nodeid or not p1:
-                raise util.Abort(_('not a Mercurial patch'))
+                raise error.Abort(_('not a Mercurial patch'))
             p1 = repo[p1]
             p2 = repo[p2 or nullid]
         elif p2:
@@ -902,7 +931,6 @@
 
         n = None
         if update:
-            dsguard = dirstateguard(repo, 'tryimportone')
             if p1 != parents[0]:
                 updatefunc(repo, p1.node())
             if p2 != parents[1]:
@@ -918,7 +946,7 @@
                             files=files, eolmode=None, similarity=sim / 100.0)
             except patch.PatchError as e:
                 if not partial:
-                    raise util.Abort(str(e))
+                    raise error.Abort(str(e))
                 if partial:
                     rejects = True
 
@@ -940,15 +968,19 @@
                 else:
                     editor = getcommiteditor(editform=editform, **opts)
                 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
+                extra = {}
+                for idfunc in extrapreimport:
+                    extrapreimportmap[idfunc](repo, extractdata, extra, opts)
                 try:
                     if partial:
                         repo.ui.setconfig('ui', 'allowemptycommit', True)
                     n = repo.commit(message, opts.get('user') or user,
                                     opts.get('date') or date, match=m,
-                                    editor=editor)
+                                    editor=editor, extra=extra)
+                    for idfunc in extrapostimport:
+                        extrapostimportmap[idfunc](repo[n])
                 finally:
                     repo.ui.restoreconfig(allowemptyback)
-            dsguard.close()
         else:
             if opts.get('exact') or opts.get('import_branch'):
                 branch = branch or 'default'
@@ -961,7 +993,7 @@
                     patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                     files, eolmode=None)
                 except patch.PatchError as e:
-                    raise util.Abort(str(e))
+                    raise error.Abort(str(e))
                 if opts.get('exact'):
                     editor = None
                 else:
@@ -980,17 +1012,24 @@
             # and branch bits
             ui.warn(_("warning: can't check exact import with --no-commit\n"))
         elif opts.get('exact') and hex(n) != nodeid:
-            raise util.Abort(_('patch is damaged or loses information'))
+            raise error.Abort(_('patch is damaged or loses information'))
         if n:
             # i18n: refers to a short changeset id
             msg = _('created %s') % short(n)
         return (msg, n, rejects)
     finally:
-        lockmod.release(dsguard)
         os.unlink(tmpname)
 
+# facility to let extensions include additional data in an exported patch
+# list of identifiers to be executed in order
+extraexport = []
+# mapping from identifier to actual export function
+# each function has to return a string to be added to the header, or None;
+# it is given two arguments (sequencenumber, changectx)
+extraexportmap = {}
+
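
Symmetrically with the import hooks above, a sketch of registering an extra
export header; the identifier and the header content are invented, but the
function signature matches the call site added in export() below::

    from mercurial import cmdutil

    def _topicheader(seqno, ctx):
        # return a string to be written as '# <string>' in the patch header,
        # or None to add nothing for this changeset
        topic = ctx.extra().get('exampleext_topic')
        if topic:
            return 'Topic %s' % topic
        return None

    cmdutil.extraexport.append('exampleext')
    cmdutil.extraexportmap['exampleext'] = _topicheader
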
 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
-           opts=None):
+           opts=None, match=None):
     '''export changesets as hg patches.'''
 
     total = len(revs)
@@ -1038,10 +1077,15 @@
         write("# Parent  %s\n" % hex(prev))
         if len(parents) > 1:
             write("# Parent  %s\n" % hex(parents[1]))
+
+        for headerid in extraexport:
+            header = extraexportmap[headerid](seqno, ctx)
+            if header is not None:
+                write('# %s\n' % header)
         write(ctx.description().rstrip())
         write("\n\n")
 
-        for chunk, label in patch.diffui(repo, prev, node, opts=opts):
+        for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
             write(chunk, label=label)
 
         if shouldclose:
@@ -1193,7 +1237,7 @@
             # i18n: column positioning for "hg log"
             self.ui.write(_("phase:       %s\n") % ctx.phasestr(),
                           label='log.phase')
-        for pctx in self._meaningful_parentrevs(ctx):
+        for pctx in scmutil.meaningfulparents(self.repo, ctx):
             label = 'log.parent changeset.%s' % pctx.phasestr()
             # i18n: column positioning for "hg log"
             self.ui.write(_("parent:      %d:%s\n")
@@ -1277,22 +1321,6 @@
                                match=matchfn, stat=False)
             self.ui.write("\n")
 
-    def _meaningful_parentrevs(self, ctx):
-        """Return list of meaningful (or all if debug) parentrevs for rev.
-
-        For merges (two non-nullrev revisions) both parents are meaningful.
-        Otherwise the first parent revision is considered meaningful if it
-        is not the preceding revision.
-        """
-        parents = ctx.parents()
-        if len(parents) > 1:
-            return parents
-        if self.ui.debugflag:
-            return [parents[0], self.repo['null']]
-        if parents[0].rev() >= scmutil.intrev(ctx.rev()) - 1:
-            return []
-        return parents
-
 class jsonchangeset(changeset_printer):
     '''format changeset information.'''
 
@@ -1412,34 +1440,7 @@
 
         self.cache = {}
 
-    def _show(self, ctx, copies, matchfn, props):
-        '''show a single changeset or file revision'''
-
-        showlist = templatekw.showlist
-
-        # showparents() behaviour depends on ui trace level which
-        # causes unexpected behaviours at templating level and makes
-        # it harder to extract it in a standalone function. Its
-        # behaviour cannot be changed so leave it here for now.
-        def showparents(**args):
-            ctx = args['ctx']
-            parents = [[('rev', p.rev()),
-                        ('node', p.hex()),
-                        ('phase', p.phasestr())]
-                       for p in self._meaningful_parentrevs(ctx)]
-            return showlist('parent', parents, **args)
-
-        props = props.copy()
-        props.update(templatekw.keywords)
-        props['parents'] = showparents
-        props['templ'] = self.t
-        props['ctx'] = ctx
-        props['repo'] = self.repo
-        props['revcache'] = {'copies': copies}
-        props['cache'] = self.cache
-
         # find correct templates for current mode
-
         tmplmodes = [
             (True, None),
             (self.ui.verbose, 'verbose'),
@@ -1447,18 +1448,40 @@
             (self.ui.debugflag, 'debug'),
         ]
 
-        types = {'header': '', 'footer':'', 'changeset': 'changeset'}
-        for mode, postfix  in tmplmodes:
-            for type in types:
-                cur = postfix and ('%s_%s' % (type, postfix)) or type
+        self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
+                       'docheader': '', 'docfooter': ''}
+        for mode, postfix in tmplmodes:
+            for t in self._parts:
+                cur = t
+                if postfix:
+                    cur += "_" + postfix
                 if mode and cur in self.t:
-                    types[type] = cur
+                    self._parts[t] = cur
+
+        if self._parts['docheader']:
+            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
+
+    def close(self):
+        if self._parts['docfooter']:
+            if not self.footer:
+                self.footer = ""
+            self.footer += templater.stringify(self.t(self._parts['docfooter']))
+        return super(changeset_templater, self).close()
+
+    def _show(self, ctx, copies, matchfn, props):
+        '''show a single changeset or file revision'''
+        props = props.copy()
+        props.update(templatekw.keywords)
+        props['templ'] = self.t
+        props['ctx'] = ctx
+        props['repo'] = self.repo
+        props['revcache'] = {'copies': copies}
+        props['cache'] = self.cache
 
         try:
-
             # write header
-            if types['header']:
-                h = templater.stringify(self.t(types['header'], **props))
+            if self._parts['header']:
+                h = templater.stringify(self.t(self._parts['header'], **props))
                 if self.buffered:
                     self.header[ctx.rev()] = h
                 else:
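
The new 'docheader' and 'docfooter' parts above let a template style emit text
once before the first changeset and once after the last one: the docheader is
written when the templater is set up, the docfooter is appended in close(). A
hedged illustration of a style map file using them (the file and its contents
are invented; rev, node and desc plus the short/firstline filters are standard
template keywords)::

    # example.style -- invented map file, used as e.g.: hg log --style example.style
    docheader = 'changesets:\n'
    changeset = ' - {rev}:{node|short} {desc|firstline}\n'
    docfooter = '(end)\n'
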
@@ -1467,20 +1490,19 @@
                         self.ui.write(h)
 
             # write changeset metadata, then patch if requested
-            key = types['changeset']
+            key = self._parts['changeset']
             self.ui.write(templater.stringify(self.t(key, **props)))
             self.showpatch(ctx.node(), matchfn)
 
-            if types['footer']:
+            if self._parts['footer']:
                 if not self.footer:
-                    self.footer = templater.stringify(self.t(types['footer'],
-                                                      **props))
-
+                    self.footer = templater.stringify(
+                        self.t(self._parts['footer'], **props))
         except KeyError as inst:
             msg = _("%s: no key named '%s'")
-            raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
+            raise error.Abort(msg % (self.t.mapfile, inst.args[0]))
         except SyntaxError as inst:
-            raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
+            raise error.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
 
 def gettemplate(ui, tmpl, style):
     """
@@ -1541,7 +1563,7 @@
         t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
                                 buffered)
     except SyntaxError as inst:
-        raise util.Abort(inst.args[0])
+        raise error.Abort(inst.args[0])
     return t
 
 def showmarker(ui, marker):
@@ -1581,7 +1603,7 @@
                       (rev, util.datestr(results[rev])))
             return str(rev)
 
-    raise util.Abort(_("revision matching date not found"))
+    raise error.Abort(_("revision matching date not found"))
 
 def increasingwindows(windowsize=8, sizelimit=512):
     while True:
@@ -1636,7 +1658,7 @@
         for filename in match.files():
             if follow:
                 if filename not in pctx:
-                    raise util.Abort(_('cannot follow file not in parent '
+                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                 yield filename, pctx[filename].filenode()
             else:
@@ -1651,7 +1673,7 @@
                 # A zero count may be a directory or deleted file, so
                 # try to find matching entries on the slow path.
                 if follow:
-                    raise util.Abort(
+                    raise error.Abort(
                         _('cannot follow nonexistent file: "%s"') % file_)
                 raise FileWalkError("Cannot walk via filelog")
             else:
@@ -1782,7 +1804,7 @@
         # changed files
 
         if follow:
-            raise util.Abort(_('can only follow copies/renames for explicit '
+            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))
 
         # The slow path checks files modified in every changeset.
@@ -1928,7 +1950,7 @@
         followfirst = 1
     else:
         followfirst = 0
-    # --follow with FILE behaviour depends on revs...
+    # --follow with FILE behavior depends on revs...
     it = iter(revs)
     startrev = it.next()
     followdescendants = startrev < next(it, startrev)
@@ -1954,14 +1976,14 @@
                     slowpath = True
                     continue
                 else:
-                    raise util.Abort(_('cannot follow file not in parent '
+                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
             filelog = repo.file(f)
             if not filelog:
                 # A zero count may be a directory or deleted file, so
                 # try to find matching entries on the slow path.
                 if follow:
-                    raise util.Abort(
+                    raise error.Abort(
                         _('cannot follow nonexistent file: "%s"') % f)
                 slowpath = True
 
@@ -2049,7 +2071,7 @@
     return expr, filematcher
 
 def _logrevs(repo, opts):
-    # Default --rev value depends on --follow but --follow behaviour
+    # Default --rev value depends on --follow but --follow behavior
     # depends on revisions resolved from --rev...
     follow = opts.get('follow') or opts.get('follow_first')
     if opts.get('rev'):
@@ -2185,7 +2207,7 @@
 def checkunsupportedgraphflags(pats, opts):
     for op in ["newest_first"]:
         if op in opts and opts[op]:
-            raise util.Abort(_("-G/--graph option is incompatible with --%s")
+            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % op.replace("_", "-"))
 
 def graphrevs(repo, nodes, opts):
@@ -2207,7 +2229,12 @@
     if abort or warn:
         cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
 
-    for f in wctx.walk(matchmod.badmatch(match, badfn)):
+    badmatch = matchmod.badmatch(match, badfn)
+    dirstate = repo.dirstate
+    # We don't want to just call wctx.walk here, since it would return a lot of
+    # clean files, which we aren't interested in, and would take time.
+    for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
+                                  True, False, full=False)):
         exact = match.exact(f)
         if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
             if cca:
@@ -2458,12 +2485,15 @@
     # that doesn't support addremove
     if opts.get('addremove'):
         if scmutil.addremove(repo, matcher, "", opts) != 0:
-            raise util.Abort(
+            raise error.Abort(
                 _("failed to mark all new/missing files as added/removed"))
 
     return commitfunc(ui, repo, message, matcher, opts)
 
 def amend(ui, repo, commitfunc, old, extra, pats, opts):
+    # avoid cycle context -> subrepo -> cmdutil
+    import context
+
     # amend will reuse the existing user if not specified, but the obsolete
     # marker creation requires that the current user's name is specified.
     if obsolete.isenabled(repo, obsolete.createmarkersopt):
@@ -2473,10 +2503,9 @@
     base = old.p1()
     createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
 
-    wlock = dsguard = lock = newid = None
+    wlock = lock = newid = None
     try:
         wlock = repo.wlock()
-        dsguard = dirstateguard(repo, 'amend')
         lock = repo.lock()
         tr = repo.transaction('amend')
         try:
@@ -2648,7 +2677,6 @@
             tr.close()
         finally:
             tr.release()
-        dsguard.close()
         if not createmarkers and newid != old.node():
             # Strip the intermediate commit (if there was one) and the amended
             # commit
@@ -2657,25 +2685,28 @@
             ui.note(_('stripping amended changeset %s\n') % old)
             repair.strip(ui, repo, old.node(), topic='amend-backup')
     finally:
-        lockmod.release(lock, dsguard, wlock)
+        lockmod.release(lock, wlock)
     return newid
 
 def commiteditor(repo, ctx, subs, editform=''):
     if ctx.description():
         return ctx.description()
-    return commitforceeditor(repo, ctx, subs, editform=editform)
+    return commitforceeditor(repo, ctx, subs, editform=editform,
+                             unchangedmessagedetection=True)
 
 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
-                      editform=''):
+                      editform='', unchangedmessagedetection=False):
     if not extramsg:
         extramsg = _("Leave message empty to abort commit.")
 
     forms = [e for e in editform.split('.') if e]
     forms.insert(0, 'changeset')
+    templatetext = None
     while forms:
         tmpl = repo.ui.config('committemplate', '.'.join(forms))
         if tmpl:
-            committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
+            templatetext = committext = buildcommittemplate(
+                repo, ctx, subs, extramsg, tmpl)
             break
         forms.pop()
     else:
@@ -2684,14 +2715,23 @@
     # run editor in the repository root
     olddir = os.getcwd()
     os.chdir(repo.root)
-    text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
-    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
+
+    # make in-memory changes visible to external process
+    tr = repo.currenttransaction()
+    repo.dirstate.write(tr)
+    pending = tr and tr.writepending() and repo.root
+
+    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
+                        editform=editform, pending=pending)
+    text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
     os.chdir(olddir)
 
     if finishdesc:
         text = finishdesc(text)
     if not text.strip():
-        raise util.Abort(_("empty commit message"))
+        raise error.Abort(_("empty commit message"))
+    if unchangedmessagedetection and editortext == templatetext:
+        raise error.Abort(_("commit message unchanged"))
 
     return text
 
@@ -2702,7 +2742,7 @@
     try:
         t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
     except SyntaxError as inst:
-        raise util.Abort(inst.args[0])
+        raise error.Abort(inst.args[0])
 
     for k, v in repo.ui.configitems('committemplate'):
         if k != 'changeset':
@@ -2715,6 +2755,9 @@
     t.show(ctx, extramsg=extramsg)
     return ui.popbuffer()
 
+def hgprefix(msg):
+    return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
+
 def buildcommittext(repo, ctx, subs, extramsg):
     edittext = []
     modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
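
The hgprefix() helper added above keeps the 'HG:' comment marker on every line
of a multi-line value and drops empty lines, for example::

    >>> hgprefix("Leave message empty to abort commit.\nSecond line.")
    'HG: Leave message empty to abort commit.\nHG: Second line.'
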
@@ -2722,28 +2765,30 @@
         edittext.append(ctx.description())
     edittext.append("")
     edittext.append("") # Empty line between message and comments.
-    edittext.append(_("HG: Enter commit message."
-                      "  Lines beginning with 'HG:' are removed."))
-    edittext.append("HG: %s" % extramsg)
+    edittext.append(hgprefix(_("Enter commit message."
+                      "  Lines beginning with 'HG:' are removed.")))
+    edittext.append(hgprefix(extramsg))
     edittext.append("HG: --")
-    edittext.append(_("HG: user: %s") % ctx.user())
+    edittext.append(hgprefix(_("user: %s") % ctx.user()))
     if ctx.p2():
-        edittext.append(_("HG: branch merge"))
+        edittext.append(hgprefix(_("branch merge")))
     if ctx.branch():
-        edittext.append(_("HG: branch '%s'") % ctx.branch())
+        edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
     if bookmarks.isactivewdirparent(repo):
-        edittext.append(_("HG: bookmark '%s'") % repo._activebookmark)
-    edittext.extend([_("HG: subrepo %s") % s for s in subs])
-    edittext.extend([_("HG: added %s") % f for f in added])
-    edittext.extend([_("HG: changed %s") % f for f in modified])
-    edittext.extend([_("HG: removed %s") % f for f in removed])
+        edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
+    edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
+    edittext.extend([hgprefix(_("added %s") % f) for f in added])
+    edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
+    edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
     if not added and not modified and not removed:
-        edittext.append(_("HG: no files changed"))
+        edittext.append(hgprefix(_("no files changed")))
     edittext.append("")
 
     return "\n".join(edittext)
 
-def commitstatus(repo, node, branch, bheads=None, opts={}):
+def commitstatus(repo, node, branch, bheads=None, opts=None):
+    if opts is None:
+        opts = {}
     ctx = repo[node]
     parents = ctx.parents()
 
@@ -3064,7 +3109,7 @@
                 try:
                     wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
                 except KeyError:
-                    raise util.Abort("subrepository '%s' does not exist in %s!"
+                    raise error.Abort("subrepository '%s' does not exist in %s!"
                                       % (sub, short(ctx.node())))
     finally:
         wlock.release()
@@ -3135,7 +3180,7 @@
                 chunks = patch.reversehunks(chunks)
 
         except patch.PatchError as err:
-            raise util.Abort(_('error parsing patch: %s') % err)
+            raise error.Abort(_('error parsing patch: %s') % err)
 
         newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
         # Apply changes
@@ -3148,7 +3193,7 @@
             try:
                 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
             except patch.PatchError as err:
-                raise util.Abort(str(err))
+                raise error.Abort(str(err))
         del fp
     else:
         for f in actions['revert'][0]:
@@ -3269,7 +3314,7 @@
         if commit and allowcommit:
             continue
         if repo.vfs.exists(f):
-            raise util.Abort(msg, hint=hint)
+            raise error.Abort(msg, hint=hint)
 
 def clearunfinished(repo):
     '''Check for unfinished operations (as above), and clear the ones
@@ -3277,7 +3322,7 @@
     '''
     for f, clearable, allowcommit, msg, hint in unfinishedstates:
         if not clearable and repo.vfs.exists(f):
-            raise util.Abort(msg, hint=hint)
+            raise error.Abort(msg, hint=hint)
     for f, clearable, allowcommit, msg, hint in unfinishedstates:
         if clearable and repo.vfs.exists(f):
             util.unlink(repo.join(f))
@@ -3297,10 +3342,9 @@
     '''
 
     def __init__(self, repo, name):
-        repo.dirstate.write()
         self._repo = repo
-        self._filename = 'dirstate.backup.%s.%d' % (name, id(self))
-        repo.vfs.write(self._filename, repo.vfs.tryread('dirstate'))
+        self._suffix = '.backup.%s.%d' % (name, id(self))
+        repo.dirstate._savebackup(repo.currenttransaction(), self._suffix)
         self._active = True
         self._closed = False
 
@@ -3314,26 +3358,25 @@
 
     def close(self):
         if not self._active: # already inactivated
-            msg = (_("can't close already inactivated backup: %s")
-                   % self._filename)
-            raise util.Abort(msg)
-
-        self._repo.vfs.unlink(self._filename)
+            msg = (_("can't close already inactivated backup: dirstate%s")
+                   % self._suffix)
+            raise error.Abort(msg)
+
+        self._repo.dirstate._clearbackup(self._repo.currenttransaction(),
+                                         self._suffix)
         self._active = False
         self._closed = True
 
     def _abort(self):
-        # this "invalidate()" prevents "wlock.release()" from writing
-        # changes of dirstate out after restoring to original status
-        self._repo.dirstate.invalidate()
-
-        self._repo.vfs.rename(self._filename, 'dirstate')
+        self._repo.dirstate._restorebackup(self._repo.currenttransaction(),
+                                           self._suffix)
         self._active = False
 
     def release(self):
         if not self._closed:
             if not self._active: # already inactivated
-                msg = (_("can't release already inactivated backup: %s")
-                       % self._filename)
-                raise util.Abort(msg)
+                msg = (_("can't release already inactivated backup:"
+                         " dirstate%s")
+                       % self._suffix)
+                raise error.Abort(msg)
             self._abort()
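
dirstateguard now delegates backup handling to the dirstate itself
(_savebackup/_restorebackup/_clearbackup) instead of copying the on-disk file
through the vfs, which also covers pending in-memory changes. The calling
convention relied on by the callers touched in this changeset (see the backout
hunk in commands.py below) is roughly::

    dsguard = cmdutil.dirstateguard(repo, 'example-operation')
    try:
        # ... mutate the working directory / dirstate ...
        dsguard.close()              # keep the new dirstate
    finally:
        lockmod.release(dsguard)     # restores the backup if close() was skipped
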
--- a/mercurial/commands.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/commands.py	Tue Oct 20 15:59:10 2015 -0500
@@ -13,16 +13,17 @@
 import hg, scmutil, util, revlog, copies, error, bookmarks
 import patch, help, encoding, templatekw, discovery
 import archival, changegroup, cmdutil, hbisect
-import sshserver, hgweb, commandserver
+import sshserver, hgweb
 import extensions
 from hgweb import server as hgweb_server
 import merge as mergemod
 import minirst, revset, fileset
 import dagparser, context, simplemerge, graphmod, copies
-import random
-import setdiscovery, treediscovery, dagutil, pvec, localrepo
+import random, operator
+import setdiscovery, treediscovery, dagutil, pvec, localrepo, destutil
 import phases, obsolete, exchange, bundle2, repair, lock as lockmod
 import ui as uimod
+import streamclone
 
 table = {}
 
@@ -238,9 +239,9 @@
     try:
         sim = float(opts.get('similarity') or 100)
     except ValueError:
-        raise util.Abort(_('similarity must be a number'))
+        raise error.Abort(_('similarity must be a number'))
     if sim < 0 or sim > 100:
-        raise util.Abort(_('similarity must be between 0 and 100'))
+        raise error.Abort(_('similarity must be between 0 and 100'))
     matcher = scmutil.match(repo[None], pats, opts)
     return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
 
@@ -276,7 +277,7 @@
     Returns 0 on success.
     """
     if not pats:
-        raise util.Abort(_('at least one filename or pattern is required'))
+        raise error.Abort(_('at least one filename or pattern is required'))
 
     if opts.get('follow'):
         # --follow is deprecated and now just an alias for -f/--file
@@ -333,7 +334,7 @@
 
     linenumber = opts.get('line_number') is not None
     if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
-        raise util.Abort(_('at least one of -n/-c is required for -l'))
+        raise error.Abort(_('at least one of -n/-c is required for -l'))
 
     if fm:
         def makefunc(get, fmt):
@@ -348,7 +349,7 @@
                       if opts.get(op))
 
     def bad(x, y):
-        raise util.Abort("%s: %s" % (x, y))
+        raise error.Abort("%s: %s" % (x, y))
 
     m = scmutil.match(ctx, pats, opts, badfn=bad)
 
@@ -438,18 +439,18 @@
 
     ctx = scmutil.revsingle(repo, opts.get('rev'))
     if not ctx:
-        raise util.Abort(_('no working directory: please specify a revision'))
+        raise error.Abort(_('no working directory: please specify a revision'))
     node = ctx.node()
     dest = cmdutil.makefilename(repo, dest, node)
     if os.path.realpath(dest) == repo.root:
-        raise util.Abort(_('repository root cannot be destination'))
+        raise error.Abort(_('repository root cannot be destination'))
 
     kind = opts.get('type') or archival.guesskind(dest) or 'files'
     prefix = opts.get('prefix')
 
     if dest == '-':
         if kind == 'files':
-            raise util.Abort(_('cannot archive plain files to stdout'))
+            raise error.Abort(_('cannot archive plain files to stdout'))
         dest = cmdutil.makefileobj(repo, dest)
         if not prefix:
             prefix = os.path.basename(repo.root) + '-%h'
@@ -497,17 +498,20 @@
 
     See :hg:`help dates` for a list of formats valid for -d/--date.
 
+    See :hg:`help revert` for a way to restore files to the state
+    of another revision.
+
     Returns 0 on success, 1 if nothing to backout or there are unresolved
     files.
     '''
     if rev and node:
-        raise util.Abort(_("please specify just one revision"))
+        raise error.Abort(_("please specify just one revision"))
 
     if not rev:
         rev = node
 
     if not rev:
-        raise util.Abort(_("please specify a revision to backout"))
+        raise error.Abort(_("please specify a revision to backout"))
 
     date = opts.get('date')
     if date:
@@ -519,22 +523,22 @@
 
     op1, op2 = repo.dirstate.parents()
     if not repo.changelog.isancestor(node, op1):
-        raise util.Abort(_('cannot backout change that is not an ancestor'))
+        raise error.Abort(_('cannot backout change that is not an ancestor'))
 
     p1, p2 = repo.changelog.parents(node)
     if p1 == nullid:
-        raise util.Abort(_('cannot backout a change with no parents'))
+        raise error.Abort(_('cannot backout a change with no parents'))
     if p2 != nullid:
         if not opts.get('parent'):
-            raise util.Abort(_('cannot backout a merge changeset'))
+            raise error.Abort(_('cannot backout a merge changeset'))
         p = repo.lookup(opts['parent'])
         if p not in (p1, p2):
-            raise util.Abort(_('%s is not a parent of %s') %
+            raise error.Abort(_('%s is not a parent of %s') %
                              (short(p), short(node)))
         parent = p
     else:
         if opts.get('parent'):
-            raise util.Abort(_('cannot use --parent on non-merge changeset'))
+            raise error.Abort(_('cannot use --parent on non-merge changeset'))
         parent = p1
 
     # the backout should appear on the same branch
@@ -544,14 +548,14 @@
         bheads = repo.branchheads(branch)
         rctx = scmutil.revsingle(repo, hex(parent))
         if not opts.get('merge') and op1 != node:
+            dsguard = cmdutil.dirstateguard(repo, 'backout')
             try:
                 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                              'backout')
-                repo.dirstate.beginparentchange()
                 stats = mergemod.update(repo, parent, True, True, False,
                                         node, False)
                 repo.setparents(op1, op2)
-                repo.dirstate.endparentchange()
+                dsguard.close()
                 hg._showstats(repo, stats)
                 if stats[3]:
                     repo.ui.status(_("use 'hg resolve' to retry unresolved "
@@ -564,6 +568,7 @@
                     return 0
             finally:
                 ui.setconfig('ui', 'forcemerge', '', '')
+                lockmod.release(dsguard)
         else:
             hg.clean(repo, node, show_stats=False)
             repo.dirstate.setbranch(branch)
@@ -742,9 +747,9 @@
             if (good or bad or skip or reset) and interactive:
                 return
             if not state['good']:
-                raise util.Abort(_('cannot bisect (no known good revisions)'))
+                raise error.Abort(_('cannot bisect (no known good revisions)'))
             else:
-                raise util.Abort(_('cannot bisect (no known bad revisions)'))
+                raise error.Abort(_('cannot bisect (no known bad revisions)'))
         return True
 
     # backward compatibility
@@ -758,7 +763,7 @@
         else:
             reset = True
     elif extra or good + bad + skip + reset + extend + bool(command) > 1:
-        raise util.Abort(_('incompatible arguments'))
+        raise error.Abort(_('incompatible arguments'))
 
     cmdutil.checkunfinished(repo)
 
@@ -776,12 +781,12 @@
             try:
                 node = state['current'][0]
             except LookupError:
-                raise util.Abort(_('current bisect revision is unknown - '
+                raise error.Abort(_('current bisect revision is unknown - '
                                    'start a new bisect to fix'))
         else:
             node, p2 = repo.dirstate.parents()
             if p2 != nullid:
-                raise util.Abort(_('current bisect revision is a merge'))
+                raise error.Abort(_('current bisect revision is a merge'))
         try:
             while changesets:
                 # update state
@@ -794,9 +799,9 @@
                     transition = "good"
                 # status < 0 means process was killed
                 elif status == 127:
-                    raise util.Abort(_("failed to execute %s") % command)
+                    raise error.Abort(_("failed to execute %s") % command)
                 elif status < 0:
-                    raise util.Abort(_("%s killed") % command)
+                    raise error.Abort(_("%s killed") % command)
                 else:
                     transition = "bad"
                 ctx = scmutil.revsingle(repo, rev, node)
@@ -850,7 +855,7 @@
                     return
                 cmdutil.bailifchanged(repo)
                 return hg.clean(repo, extendnode.node())
-        raise util.Abort(_("nothing to extend"))
+        raise error.Abort(_("nothing to extend"))
 
     if changesets == 0:
         print_result(nodes, good)
@@ -875,7 +880,7 @@
     [('f', 'force', False, _('force')),
     ('r', 'rev', '', _('revision'), _('REV')),
     ('d', 'delete', False, _('delete a given bookmark')),
-    ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
+    ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
     ('i', 'inactive', False, _('mark a bookmark inactive')),
     ] + formatteropts,
     _('hg bookmarks [OPTIONS]... [NAME]...'))
@@ -916,6 +921,10 @@
 
           hg book -r .^ tested
 
+      - rename bookmark turkey to dinner::
+
+          hg book -m turkey dinner
+
       - move the '@' bookmark from another branch::
 
           hg book -f @
@@ -929,7 +938,7 @@
     def checkformat(mark):
         mark = mark.strip()
         if not mark:
-            raise util.Abort(_("bookmark names cannot consist entirely of "
+            raise error.Abort(_("bookmark names cannot consist entirely of "
                                "whitespace"))
         scmutil.checknewlabel(repo, mark, 'bookmark')
         return mark
@@ -959,21 +968,21 @@
                     ui.status(_("moving bookmark '%s' forward from %s\n") %
                               (mark, short(bmctx.node())))
                     return
-            raise util.Abort(_("bookmark '%s' already exists "
+            raise error.Abort(_("bookmark '%s' already exists "
                                "(use -f to force)") % mark)
         if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
             and not force):
-            raise util.Abort(
+            raise error.Abort(
                 _("a bookmark cannot have the name of an existing branch"))
 
     if delete and rename:
-        raise util.Abort(_("--delete and --rename are incompatible"))
+        raise error.Abort(_("--delete and --rename are incompatible"))
     if delete and rev:
-        raise util.Abort(_("--rev is incompatible with --delete"))
+        raise error.Abort(_("--rev is incompatible with --delete"))
     if rename and rev:
-        raise util.Abort(_("--rev is incompatible with --rename"))
+        raise error.Abort(_("--rev is incompatible with --rename"))
     if not names and (delete or rev):
-        raise util.Abort(_("bookmark name required"))
+        raise error.Abort(_("bookmark name required"))
 
     if delete or rename or names or inactive:
         wlock = lock = tr = None
@@ -986,7 +995,7 @@
                 tr = repo.transaction('bookmark')
                 for mark in names:
                     if mark not in marks:
-                        raise util.Abort(_("bookmark '%s' does not exist") %
+                        raise error.Abort(_("bookmark '%s' does not exist") %
                                          mark)
                     if mark == repo._activebookmark:
                         bookmarks.deactivate(repo)
@@ -995,12 +1004,13 @@
             elif rename:
                 tr = repo.transaction('bookmark')
                 if not names:
-                    raise util.Abort(_("new bookmark name required"))
+                    raise error.Abort(_("new bookmark name required"))
                 elif len(names) > 1:
-                    raise util.Abort(_("only one new bookmark name allowed"))
+                    raise error.Abort(_("only one new bookmark name allowed"))
                 mark = checkformat(names[0])
                 if rename not in marks:
-                    raise util.Abort(_("bookmark '%s' does not exist") % rename)
+                    raise error.Abort(_("bookmark '%s' does not exist")
+                                      % rename)
                 checkconflict(repo, mark, cur, force)
                 marks[mark] = marks[rename]
                 if repo._activebookmark == rename and not inactive:
@@ -1111,7 +1121,7 @@
         elif label:
             if not opts.get('force') and label in repo.branchmap():
                 if label not in [p.branch() for p in repo.parents()]:
-                    raise util.Abort(_('a branch of the same name already'
+                    raise error.Abort(_('a branch of the same name already'
                                        ' exists'),
                                      # i18n: "it" refers to an existing branch
                                      hint=_("use 'hg update' to switch to it"))
@@ -1212,9 +1222,11 @@
     parameters. To create a bundle containing all changesets, use
     -a/--all (or --base null).
 
-    You can change compression method with the -t/--type option.
-    The available compression methods are: none, bzip2, and
-    gzip (by default, bundles are compressed using bzip2).
+    You can change the bundle format with the -t/--type option. You can
+    specify a compression, a bundle version, or both using a dash
+    (comp-version). The available compression methods are: none, bzip2,
+    and gzip (by default, bundles are compressed using bzip2). The
+    available formats are: v1, v2 (defaults to the most suitable).
 
     The bundle file can then be transferred using conventional means
     and applied to another repository with the unbundle or pull
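
For instance, with the parsing added further down in this command, the
following invocations are expected to select the container and compression as
noted (file names are only examples)::

      $ hg bundle --all --type gzip-v1 ../all-v1.hg     # bundle1, gzip ('HG10GZ')
      $ hg bundle --all --type none-v1 ../all-plain.hg  # bundle1, uncompressed ('HG10UN')
      $ hg bundle --all --type v2 ../all-v2.hg          # bundle2 container ('HG20')
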
@@ -1231,13 +1243,18 @@
         revs = scmutil.revrange(repo, opts['rev'])
 
     bundletype = opts.get('type', 'bzip2').lower()
-    btypes = {'none': 'HG10UN',
-              'bzip2': 'HG10BZ',
-              'gzip': 'HG10GZ',
-              'bundle2': 'HG20'}
-    bundletype = btypes.get(bundletype)
-    if bundletype not in changegroup.bundletypes:
-        raise util.Abort(_('unknown bundle type specified with --type'))
+    try:
+        bcompression, cgversion, params = exchange.parsebundlespec(
+                repo, bundletype, strict=False)
+    except error.UnsupportedBundleSpecification as e:
+        raise error.Abort(str(e),
+                          hint=_('see "hg help bundle" for supported '
+                                 'values for --type'))
+
+    # Packed bundles are a pseudo bundle format for now.
+    if cgversion == 's1':
+        raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
+                          hint=_('use "hg debugcreatestreamclonebundle"'))
 
     if opts.get('all'):
         base = ['null']
@@ -1247,12 +1264,13 @@
     bundlecaps = None
     if base:
         if dest:
-            raise util.Abort(_("--base is incompatible with specifying "
+            raise error.Abort(_("--base is incompatible with specifying "
                                "a destination"))
         common = [repo.lookup(rev) for rev in base]
         heads = revs and map(repo.lookup, revs) or revs
         cg = changegroup.getchangegroup(repo, 'bundle', heads=heads,
-                                         common=common, bundlecaps=bundlecaps)
+                                         common=common, bundlecaps=bundlecaps,
+                                         version=cgversion)
         outgoing = None
     else:
         dest = ui.expandpath(dest or 'default-push', dest or 'default')
@@ -1265,12 +1283,22 @@
                                                 force=opts.get('force'),
                                                 portable=True)
         cg = changegroup.getlocalchangegroup(repo, 'bundle', outgoing,
-                                             bundlecaps)
+                                                bundlecaps, version=cgversion)
     if not cg:
         scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
         return 1
 
-    changegroup.writebundle(ui, cg, fname, bundletype)
+    if cgversion == '01': #bundle1
+        if bcompression is None:
+            bcompression = 'UN'
+        bversion = 'HG10' + bcompression
+        bcompression = None
+    else:
+        assert cgversion == '02'
+        bversion = 'HG20'
+
+
+    changegroup.writebundle(ui, cg, fname, bversion, compression=bcompression)
 
 @command('cat',
     [('o', 'output', '',
@@ -1414,7 +1442,7 @@
     Returns 0 on success.
     """
     if opts.get('noupdate') and opts.get('updaterev'):
-        raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
+        raise error.Abort(_("cannot specify both --noupdate and --updaterev"))
 
     r = hg.clone(ui, opts, source, dest,
                  pull=opts.get('pull'),
@@ -1486,7 +1514,7 @@
 
     if opts.get('subrepos'):
         if opts.get('amend'):
-            raise util.Abort(_('cannot amend with --subrepos'))
+            raise error.Abort(_('cannot amend with --subrepos'))
         # Let --subrepos on the command line override config setting.
         ui.setconfig('ui', 'commitsubrepos', True, 'commit')
 
@@ -1500,24 +1528,24 @@
         extra['close'] = 1
 
         if not bheads:
-            raise util.Abort(_('can only close branch heads'))
+            raise error.Abort(_('can only close branch heads'))
         elif opts.get('amend'):
             if repo.parents()[0].p1().branch() != branch and \
                     repo.parents()[0].p2().branch() != branch:
-                raise util.Abort(_('can only close branch heads'))
+                raise error.Abort(_('can only close branch heads'))
 
     if opts.get('amend'):
         if ui.configbool('ui', 'commitsubrepos'):
-            raise util.Abort(_('cannot amend with ui.commitsubrepos enabled'))
+            raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))
 
         old = repo['.']
         if not old.mutable():
-            raise util.Abort(_('cannot amend public changesets'))
+            raise error.Abort(_('cannot amend public changesets'))
         if len(repo[None].parents()) > 1:
-            raise util.Abort(_('cannot amend while merging'))
+            raise error.Abort(_('cannot amend while merging'))
         allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
         if not allowunstable and old.children():
-            raise util.Abort(_('cannot amend changeset with children'))
+            raise error.Abort(_('cannot amend changeset with children'))
 
         # commitfunc is used only for temporary amend commit by cmdutil.amend
         def commitfunc(ui, repo, message, match, opts):
@@ -1599,11 +1627,11 @@
 
     if opts.get('edit') or opts.get('local') or opts.get('global'):
         if opts.get('local') and opts.get('global'):
-            raise util.Abort(_("can't use --local and --global together"))
+            raise error.Abort(_("can't use --local and --global together"))
 
         if opts.get('local'):
             if not repo:
-                raise util.Abort(_("can't use --local outside a repository"))
+                raise error.Abort(_("can't use --local outside a repository"))
             paths = [repo.join('hgrc')]
         elif opts.get('global'):
             paths = scmutil.systemrcpath()
@@ -1628,7 +1656,7 @@
 
         editor = ui.geteditor()
         ui.system("%s \"%s\"" % (editor, f),
-                  onerr=util.Abort, errprefix=_("edit failed"))
+                  onerr=error.Abort, errprefix=_("edit failed"))
         return
 
     for f in scmutil.rcpath():
@@ -1638,7 +1666,7 @@
         sections = [v for v in values if '.' not in v]
         items = [v for v in values if '.' in v]
         if len(items) > 1 or items and sections:
-            raise util.Abort(_('only one config item permitted'))
+            raise error.Abort(_('only one config item permitted'))
     matched = False
     for section, name, value in ui.walkconfig(untrusted=untrusted):
         value = str(value).replace('\n', '\\n')
@@ -1700,13 +1728,13 @@
         lookup = r.lookup
     elif len(args) == 2:
         if not repo:
-            raise util.Abort(_("there is no Mercurial repository here "
+            raise error.Abort(_("there is no Mercurial repository here "
                                "(.hg not found)"))
         rev1, rev2 = args
         r = repo.changelog
         lookup = repo.lookup
     else:
-        raise util.Abort(_('either two or three arguments required'))
+        raise error.Abort(_('either two or three arguments required'))
     a = r.ancestor(lookup(rev1), lookup(rev2))
     ui.write("%d:%s\n" % (r.rev(a), hex(a)))
 
@@ -1757,7 +1785,7 @@
 
     cl = repo.changelog
     if len(cl) > 0:
-        raise util.Abort(_('repository is not empty'))
+        raise error.Abort(_('repository is not empty'))
 
     # determine number of revs in DAG
     total = 0
@@ -1905,7 +1933,7 @@
                 showchunks(fname)
         else:
             if isinstance(gen, bundle2.unbundle20):
-                raise util.Abort(_('use debugbundle2 for this file'))
+                raise error.Abort(_('use debugbundle2 for this file'))
             chunkdata = gen.changelogheader()
             chain = None
             while True:
@@ -1921,7 +1949,7 @@
 def _debugbundle2(ui, gen, **opts):
     """lists the contents of a bundle2"""
     if not isinstance(gen, bundle2.unbundle20):
-        raise util.Abort(_('not a bundle2 file'))
+        raise error.Abort(_('not a bundle2 file'))
     ui.write(('Stream params: %s\n' % repr(gen.params)))
     for part in gen.iterparts():
         ui.write('%s -- %r\n' % (part.type, repr(part.params)))
@@ -1938,6 +1966,25 @@
                 ui.write("    %s\n" % hex(node))
                 chain = node
 
+@command('debugcreatestreamclonebundle', [], 'FILE')
+def debugcreatestreamclonebundle(ui, repo, fname):
+    """create a stream clone bundle file
+
+    Stream bundles are special bundles that are essentially archives of
+    revlog files. They are commonly used for cloning very quickly.
+    """
+    requirements, gen = streamclone.generatebundlev1(repo)
+    changegroup.writechunks(ui, gen, fname)
+
+    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
+
+@command('debugapplystreamclonebundle', [], 'FILE')
+def debugapplystreamclonebundle(ui, repo, fname):
+    """apply a stream clone bundle file"""
+    f = hg.openpath(ui, fname)
+    gen = exchange.readbundle(ui, f, fname)
+    gen.apply(repo)
+
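
A hedged, API-level sketch of how the two new debug commands pair up. The function names are the ones introduced in this changeset; error handling, locking, and the requirement check on the receiving side are omitted.

    from mercurial import changegroup, exchange, hg, streamclone

    def copyviastreamclonebundle(ui, srcrepo, destrepo, fname):
        # write side: requirements list what the receiving repo must support
        requirements, gen = streamclone.generatebundlev1(srcrepo)
        changegroup.writechunks(ui, gen, fname)
        # apply side: readbundle detects the packed format and returns an
        # applier whose apply() writes the revlog data into the target repo
        bundle = exchange.readbundle(ui, hg.openpath(ui, fname), fname)
        bundle.apply(destrepo)
        return requirements
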
 @command('debugcheckstate', [], '')
 def debugcheckstate(ui, repo):
     """validate the correctness of the current dirstate"""
@@ -1964,7 +2011,7 @@
             errors += 1
     if errors:
         error = _(".hg/dirstate inconsistent with current parent's manifest")
-        raise util.Abort(error)
+        raise error.Abort(error)
 
 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
 def debugcommands(ui, cmd='', *args):
@@ -2052,7 +2099,7 @@
                         for l in ls:
                             yield 'l', (r, l)
     else:
-        raise util.Abort(_('need repo for changelog dag'))
+        raise error.Abort(_('need repo for changelog dag'))
 
     for line in dagparser.dagtextlines(events(),
                                        addspaces=spaces,
@@ -2079,7 +2126,7 @@
     try:
         ui.write(r.revision(r.lookup(rev)))
     except KeyError:
-        raise util.Abort(_('invalid revision identifier %s') % rev)
+        raise error.Abort(_('invalid revision identifier %s') % rev)
 
 @command('debugdate',
     [('e', 'extended', None, _('try extended date formats'))],
@@ -2116,7 +2163,7 @@
     def doit(localheads, remoteheads, remote=remote):
         if opts.get('old'):
             if localheads:
-                raise util.Abort('cannot use localheads with old style '
+                raise error.Abort('cannot use localheads with old style '
                                  'discovery')
             if not util.safehasattr(remote, 'branches'):
                 # enable in-client legacy support
@@ -2167,6 +2214,45 @@
         localrevs = opts.get('local_head')
         doit(localrevs, remoterevs)
 
+@command('debugextensions', formatteropts, [], norepo=True)
+def debugextensions(ui, **opts):
+    '''show information about active extensions'''
+    exts = extensions.extensions(ui)
+    fm = ui.formatter('debugextensions', opts)
+    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
+        extsource = extmod.__file__
+        exttestedwith = getattr(extmod, 'testedwith', None)
+        if exttestedwith is not None:
+            exttestedwith = exttestedwith.split()
+        extbuglink = getattr(extmod, 'buglink', None)
+
+        fm.startitem()
+
+        if ui.quiet or ui.verbose:
+            fm.write('name', '%s\n', extname)
+        else:
+            fm.write('name', '%s', extname)
+            if not exttestedwith:
+                fm.plain(_(' (untested!)\n'))
+            else:
+                if exttestedwith == ['internal'] or \
+                                util.version() in exttestedwith:
+                    fm.plain('\n')
+                else:
+                    lasttestedversion = exttestedwith[-1]
+                    fm.plain(' (%s!)\n' % lasttestedversion)
+
+        fm.condwrite(ui.verbose and extsource, 'source',
+                 _('  location: %s\n'), extsource or "")
+
+        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
+                 _('  tested with: %s\n'), ' '.join(exttestedwith or []))
+
+        fm.condwrite(ui.verbose and extbuglink, 'buglink',
+                 _('  bug reporting: %s\n'), extbuglink or "")
+
+    fm.end()
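
The annotation logic above boils down to a small predicate; a sketch, assuming hgversion is util.version() and exttestedwith is the already-split 'testedwith' attribute:

    def testedannotation(exttestedwith, hgversion):
        # mirrors the branches above: untested, in sync, or stale
        if not exttestedwith:
            return ' (untested!)'
        if exttestedwith == ['internal'] or hgversion in exttestedwith:
            return ''
        return ' (%s!)' % exttestedwith[-1]

    # testedannotation(['3.5', '3.5.2'], '3.6') == ' (3.5.2!)'
    # testedannotation(['internal'], '3.6')     == ''
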
+
 @command('debugfileset',
     [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
     _('[-r REV] FILESPEC'))
@@ -2205,7 +2291,7 @@
     """
     repo = hg.peer(ui, opts, repopath)
     if not repo.capable('getbundle'):
-        raise util.Abort("getbundle() not supported by target repository")
+        raise error.Abort("getbundle() not supported by target repository")
     args = {}
     if common:
         args['common'] = [bin(s) for s in common]
@@ -2222,7 +2308,7 @@
               'bundle2': 'HG20'}
     bundletype = btypes.get(bundletype)
     if bundletype not in changegroup.bundletypes:
-        raise util.Abort(_('unknown bundle type specified with --type'))
+        raise error.Abort(_('unknown bundle type specified with --type'))
     changegroup.writebundle(ui, bundle, bundlepath, bundletype)
 
 @command('debugignore', [], '')
@@ -2233,7 +2319,7 @@
     if includepat is not None:
         ui.write("%s\n" % includepat)
     else:
-        raise util.Abort(_("no ignore patterns found"))
+        raise error.Abort(_("no ignore patterns found"))
 
 @command('debugindex',
     [('c', 'changelog', False, _('open changelog')),
@@ -2247,7 +2333,7 @@
     r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
     format = opts.get('format', 0)
     if format not in (0, 1):
-        raise util.Abort(_("unknown format %d") % format)
+        raise error.Abort(_("unknown format %d") % format)
 
     generaldelta = r.version & revlog.REVLOGGENERALDELTA
     if generaldelta:
@@ -2333,7 +2419,7 @@
     ui.status(_("checking encoding (%s)...\n") % encoding.encoding)
     try:
         encoding.fromlocal("test")
-    except util.Abort as inst:
+    except error.Abort as inst:
         ui.write(" %s\n" % inst)
         ui.write(_(" (check that your locale is properly set)\n"))
         problems += 1
@@ -2399,7 +2485,7 @@
     ui.status(_("checking username...\n"))
     try:
         ui.username()
-    except util.Abort as e:
+    except error.Abort as e:
         ui.write(" %s\n" % e)
         ui.write(_(" (specify a username in your configuration file)\n"))
         problems += 1
@@ -2421,7 +2507,7 @@
     """
     repo = hg.peer(ui, opts, repopath)
     if not repo.capable('known'):
-        raise util.Abort("known() not supported by target repository")
+        raise error.Abort("known() not supported by target repository")
     flags = repo.known([bin(s) for s in ids])
     ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
 
@@ -2430,6 +2516,75 @@
     '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
     debugnamecomplete(ui, repo, *args)
 
+@command('debugmergestate', [], '')
+def debugmergestate(ui, repo, *args):
+    """print merge state
+
+    Use --verbose to print out information about whether v1 or v2 merge state
+    was chosen."""
+    def printrecords(version):
+        ui.write(('* version %s records\n') % version)
+        if version == 1:
+            records = v1records
+        else:
+            records = v2records
+
+        for rtype, record in records:
+            # pretty print some record types
+            if rtype == 'L':
+                ui.write(('local: %s\n') % record)
+            elif rtype == 'O':
+                ui.write(('other: %s\n') % record)
+            elif rtype == 'm':
+                driver, mdstate = record.split('\0', 1)
+                ui.write(('merge driver: %s (state "%s")\n')
+                         % (driver, mdstate))
+            elif rtype in 'FD':
+                r = record.split('\0')
+                f, state, hash, lfile, afile, anode, ofile = r[0:7]
+                if version == 1:
+                    onode = 'not stored in v1 format'
+                    flags = r[7]
+                else:
+                    onode, flags = r[7:9]
+                ui.write(('file: %s (state "%s", hash %s)\n')
+                         % (f, state, hash))
+                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
+                ui.write(('  ancestor path: %s (node %s)\n') % (afile, anode))
+                ui.write(('  other path: %s (node %s)\n') % (ofile, onode))
+            else:
+                ui.write(('unrecognized entry: %s\t%s\n')
+                         % (rtype, record.replace('\0', '\t')))
+
+    ms = mergemod.mergestate(repo)
+
+    # sort so that reasonable information is on top
+    v1records = ms._readrecordsv1()
+    v2records = ms._readrecordsv2()
+    order = 'LOm'
+    def key(r):
+        idx = order.find(r[0])
+        if idx == -1:
+            return (1, r[1])
+        else:
+            return (0, idx)
+    v1records.sort(key=key)
+    v2records.sort(key=key)
+
+    if not v1records and not v2records:
+        ui.write(('no merge state found\n'))
+    elif not v2records:
+        ui.note(('no version 2 merge state\n'))
+        printrecords(1)
+    elif ms._v1v2match(v1records, v2records):
+        ui.note(('v1 and v2 states match: using v2\n'))
+        printrecords(2)
+    else:
+        ui.note(('v1 and v2 states mismatch: using v1\n'))
+        printrecords(1)
+        if ui.verbose:
+            printrecords(2)
+
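
The sort above simply floats the summary records ('L', 'O', 'm') to the top in that order and leaves the remaining (file) records sorted by payload; a standalone sketch:

    _order = 'LOm'

    def recordkey(record):
        rtype, payload = record
        idx = _order.find(rtype)
        # unknown/file records sort after the fixed-order summary records
        return (1, payload) if idx == -1 else (0, idx)

    records = [('F', 'b\0...'), ('O', 'node-o'), ('F', 'a\0...'), ('L', 'node-l')]
    # sorted(records, key=recordkey) yields L, O, then the file records by path
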
 @command('debugnamecomplete', [], _('NAME...'))
 def debugnamecomplete(ui, repo, *args):
     '''complete "names" - tags, open branch names, bookmark names'''
@@ -2544,12 +2699,12 @@
                 raise TypeError()
             return n
         except TypeError:
-            raise util.Abort('changeset references must be full hexadecimal '
+            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')
 
     if precursor is not None:
         if opts['rev']:
-            raise util.Abort('cannot select revision when creating marker')
+            raise error.Abort('cannot select revision when creating marker')
         metadata = {}
         metadata['user'] = opts['user'] or ui.username()
         succs = tuple(parsenodeid(succ) for succ in successors)
@@ -2566,7 +2721,7 @@
                 parents = None
                 if opts['record_parents']:
                     if prec not in repo.unfiltered():
-                        raise util.Abort('cannot used --record-parents on '
+                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                     parents = repo.unfiltered()[prec].parents()
                     parents = tuple(p.node() for p in parents)
@@ -2575,7 +2730,7 @@
                                      metadata=metadata)
                 tr.close()
             except ValueError as exc:
-                raise util.Abort(_('bad obsmarker input: %s') % exc)
+                raise error.Abort(_('bad obsmarker input: %s') % exc)
             finally:
                 tr.release()
         finally:
@@ -2700,9 +2855,12 @@
               pa.distance(pb), rel))
 
 @command('debugrebuilddirstate|debugrebuildstate',
-    [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
+    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
+     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
+                             'the working copy parent')),
+    ],
     _('[-r REV]'))
-def debugrebuilddirstate(ui, repo, rev):
+def debugrebuilddirstate(ui, repo, rev, **opts):
     """rebuild the dirstate as it would look like for the given revision
 
     If no revision is specified the first current parent will be used.
@@ -2711,13 +2869,33 @@
     The actual working directory content or existing dirstate
     information such as adds or removes is not considered.
 
+    ``minimal`` will only rebuild the dirstate status for files that claim to be
+    tracked but are not in the parent manifest, or that exist in the parent
+    manifest but are not in the dirstate. It will not change adds, removes, or
+    modified files that are in the working copy parent.
+
     One use of this command is to make the next :hg:`status` invocation
     check the actual file content.
     """
     ctx = scmutil.revsingle(repo, rev)
     wlock = repo.wlock()
     try:
-        repo.dirstate.rebuild(ctx.node(), ctx.manifest())
+        dirstate = repo.dirstate
+
+        # See command doc for what minimal does.
+        if opts.get('minimal'):
+            dirstatefiles = set(dirstate)
+            ctxfiles = set(ctx.manifest().keys())
+            for file in (dirstatefiles | ctxfiles):
+                indirstate = file in dirstatefiles
+                inctx = file in ctxfiles
+
+                if indirstate and not inctx and dirstate[file] != 'a':
+                    dirstate.drop(file)
+                elif inctx and not indirstate:
+                    dirstate.normallookup(file)
+        else:
+            dirstate.rebuild(ctx.node(), ctx.manifest())
     finally:
         wlock.release()
 
@@ -2933,7 +3111,7 @@
     expansion.
     """
     if ui.verbose:
-        tree = revset.parse(expr)
+        tree = revset.parse(expr, lookup=repo.__contains__)
         ui.note(revset.prettyformat(tree), "\n")
         newtree = revset.findaliases(ui, tree)
         if newtree != tree:
@@ -2945,7 +3123,7 @@
         if opts["optimize"]:
             weight, optimizedtree = revset.optimize(newtree, True)
             ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n")
-    func = revset.match(ui, expr)
+    func = revset.match(ui, expr, repo)
     revs = func(repo)
     if ui.verbose:
         ui.note("* set:\n", revset.prettyformatset(revs), "\n")
@@ -3177,7 +3355,7 @@
 
     if revs and change:
         msg = _('cannot specify --rev and --change at the same time')
-        raise util.Abort(msg)
+        raise error.Abort(msg)
     elif change:
         node2 = scmutil.revsingle(repo, change, None).node()
         node1 = repo[node2].p1().node()
@@ -3265,7 +3443,7 @@
         changesets = ['.']
     revs = scmutil.revrange(repo, changesets)
     if not revs:
-        raise util.Abort(_("export requires at least one changeset"))
+        raise error.Abort(_("export requires at least one changeset"))
     if len(revs) > 1:
         ui.note(_('exporting patches:\n'))
     else:
@@ -3369,7 +3547,7 @@
     """
 
     if not pats:
-        raise util.Abort(_('no files specified'))
+        raise error.Abort(_('no files specified'))
 
     m = scmutil.match(repo[None], pats, opts)
     rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
@@ -3460,7 +3638,7 @@
     if opts['continue']:
         cont = True
         if revs:
-            raise util.Abort(_("can't specify --continue and revisions"))
+            raise error.Abort(_("can't specify --continue and revisions"))
         # read in unfinished revisions
         try:
             nodes = repo.vfs.read('graftstate').splitlines()
@@ -3468,12 +3646,12 @@
         except IOError as inst:
             if inst.errno != errno.ENOENT:
                 raise
-            raise util.Abort(_("no graft state found, can't continue"))
+            raise error.Abort(_("no graft state found, can't continue"))
     else:
         cmdutil.checkunfinished(repo)
         cmdutil.bailifchanged(repo)
         if not revs:
-            raise util.Abort(_('no revisions specified'))
+            raise error.Abort(_('no revisions specified'))
         revs = scmutil.revrange(repo, revs)
 
     skipped = set()
@@ -3596,7 +3774,7 @@
                     # write out state for --continue
                     nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
                     repo.vfs.write('graftstate', ''.join(nodelines))
-                    raise util.Abort(
+                    raise error.Abort(
                         _("unresolved conflicts, can't continue"),
                         hint=_('use hg resolve and hg graft --continue'))
             else:
@@ -3915,9 +4093,9 @@
 @command('help',
     [('e', 'extension', None, _('show only help for extensions')),
      ('c', 'command', None, _('show only help for commands')),
-     ('k', 'keyword', '', _('show topics matching keyword')),
+     ('k', 'keyword', None, _('show topics matching keyword')),
      ],
-    _('[-ec] [TOPIC]'),
+    _('[-eck] [TOPIC]'),
     norepo=True)
 def help_(ui, name=None, **opts):
     """show help for a given topic or a help overview
@@ -3948,13 +4126,18 @@
     section = None
     if name and '.' in name:
         name, section = name.split('.', 1)
+        section = section.lower()
 
     text = help.help_(ui, name, **opts)
 
     formatted, pruned = minirst.format(text, textwidth, keep=keep,
                                        section=section)
-    if section and not formatted:
-        raise util.Abort(_("help section not found"))
+
+    # We could have been given a weird ".foo" section without a name
+    # to look for, or we could have simply failed to find "foo.bar"
+    # because bar isn't a section of foo
+    if section and not (formatted and name):
+        raise error.Abort(_("help section not found"))
 
     if 'verbose' in pruned:
         keep.append('omitted')
@@ -4011,7 +4194,7 @@
     """
 
     if not repo and not source:
-        raise util.Abort(_("there is no Mercurial repository here "
+        raise error.Abort(_("there is no Mercurial repository here "
                            "(.hg not found)"))
 
     if ui.debugflag:
@@ -4030,7 +4213,7 @@
 
     if not repo:
         if num or branch or tags:
-            raise util.Abort(
+            raise error.Abort(
                 _("can't query remote revision number, branch, or tags"))
         if not rev and revs:
             rev = revs[0]
@@ -4223,7 +4406,7 @@
     """
 
     if not patch1:
-        raise util.Abort(_('need at least one patch to import'))
+        raise error.Abort(_('need at least one patch to import'))
 
     patches = (patch1,) + patches
 
@@ -4233,19 +4416,19 @@
 
     update = not opts.get('bypass')
     if not update and opts.get('no_commit'):
-        raise util.Abort(_('cannot use --no-commit with --bypass'))
+        raise error.Abort(_('cannot use --no-commit with --bypass'))
     try:
         sim = float(opts.get('similarity') or 0)
     except ValueError:
-        raise util.Abort(_('similarity must be a number'))
+        raise error.Abort(_('similarity must be a number'))
     if sim < 0 or sim > 100:
-        raise util.Abort(_('similarity must be between 0 and 100'))
+        raise error.Abort(_('similarity must be between 0 and 100'))
     if sim and not update:
-        raise util.Abort(_('cannot use --similarity with --bypass'))
+        raise error.Abort(_('cannot use --similarity with --bypass'))
     if opts.get('exact') and opts.get('edit'):
-        raise util.Abort(_('cannot use --exact with --edit'))
+        raise error.Abort(_('cannot use --exact with --edit'))
     if opts.get('exact') and opts.get('prefix'):
-        raise util.Abort(_('cannot use --exact with --prefix'))
+        raise error.Abort(_('cannot use --exact with --prefix'))
 
     if update:
         cmdutil.checkunfinished(repo)
@@ -4261,10 +4444,11 @@
     try:
         try:
             wlock = repo.wlock()
-            dsguard = cmdutil.dirstateguard(repo, 'import')
             if not opts.get('no_commit'):
                 lock = repo.lock()
                 tr = repo.transaction('import')
+            else:
+                dsguard = cmdutil.dirstateguard(repo, 'import')
             parents = repo.parents()
             for patchurl in patches:
                 if patchurl == '-':
@@ -4296,13 +4480,14 @@
                         break
 
                 if not haspatch:
-                    raise util.Abort(_('%s: no diffs found') % patchurl)
+                    raise error.Abort(_('%s: no diffs found') % patchurl)
 
             if tr:
                 tr.close()
             if msgs:
                 repo.savecommitmessage('\n* * *\n'.join(msgs))
-            dsguard.close()
+            if dsguard:
+                dsguard.close()
             return ret
         finally:
             # TODO: get rid of this meaningless try/finally enclosing.
@@ -4391,7 +4576,7 @@
         return 0
 
     if opts.get('bundle') and opts.get('subrepos'):
-        raise util.Abort(_('cannot combine --bundle and --subrepos'))
+        raise error.Abort(_('cannot combine --bundle and --subrepos'))
 
     if opts.get('bookmarks'):
         source, branches = hg.parseurl(ui.expandpath(source),
@@ -4657,7 +4842,7 @@
 
     if opts.get('all'):
         if rev or node:
-            raise util.Abort(_("can't specify a revision with --all"))
+            raise error.Abort(_("can't specify a revision with --all"))
 
         res = []
         prefix = "data/"
@@ -4678,7 +4863,7 @@
         return
 
     if rev and node:
-        raise util.Abort(_("please specify just one revision"))
+        raise error.Abort(_("please specify just one revision"))
 
     if not node:
         node = rev
@@ -4733,65 +4918,15 @@
     """
 
     if opts.get('rev') and node:
-        raise util.Abort(_("please specify just one revision"))
+        raise error.Abort(_("please specify just one revision"))
     if not node:
         node = opts.get('rev')
 
     if node:
         node = scmutil.revsingle(repo, node).node()
 
-    if not node and repo._activebookmark:
-        bmheads = repo.bookmarkheads(repo._activebookmark)
-        curhead = repo[repo._activebookmark].node()
-        if len(bmheads) == 2:
-            if curhead == bmheads[0]:
-                node = bmheads[1]
-            else:
-                node = bmheads[0]
-        elif len(bmheads) > 2:
-            raise util.Abort(_("multiple matching bookmarks to merge - "
-                "please merge with an explicit rev or bookmark"),
-                hint=_("run 'hg heads' to see all heads"))
-        elif len(bmheads) <= 1:
-            raise util.Abort(_("no matching bookmark to merge - "
-                "please merge with an explicit rev or bookmark"),
-                hint=_("run 'hg heads' to see all heads"))
-
-    if not node and not repo._activebookmark:
-        branch = repo[None].branch()
-        bheads = repo.branchheads(branch)
-        nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]
-
-        if len(nbhs) > 2:
-            raise util.Abort(_("branch '%s' has %d heads - "
-                               "please merge with an explicit rev")
-                             % (branch, len(bheads)),
-                             hint=_("run 'hg heads .' to see heads"))
-
-        parent = repo.dirstate.p1()
-        if len(nbhs) <= 1:
-            if len(bheads) > 1:
-                raise util.Abort(_("heads are bookmarked - "
-                                   "please merge with an explicit rev"),
-                                 hint=_("run 'hg heads' to see all heads"))
-            if len(repo.heads()) > 1:
-                raise util.Abort(_("branch '%s' has one head - "
-                                   "please merge with an explicit rev")
-                                 % branch,
-                                 hint=_("run 'hg heads' to see all heads"))
-            msg, hint = _('nothing to merge'), None
-            if parent != repo.lookup(branch):
-                hint = _("use 'hg update' instead")
-            raise util.Abort(msg, hint=hint)
-
-        if parent not in bheads:
-            raise util.Abort(_('working directory not at a head revision'),
-                             hint=_("use 'hg update' or merge with an "
-                                    "explicit revision"))
-        if parent == nbhs[0]:
-            node = nbhs[-1]
-        else:
-            node = nbhs[0]
+    if not node:
+        node = repo[destutil.destmerge(repo)].node()
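
The inline head-selection logic deleted above now lives in destutil.destmerge; as a reminder of what it did in the common bookmark case, a condensed sketch of the removed behaviour (two heads on the active bookmark: merge with the one we are not on, otherwise abort):

    from mercurial import error

    def bookmarkmergedest(repo):
        bmheads = repo.bookmarkheads(repo._activebookmark)
        curhead = repo[repo._activebookmark].node()
        if len(bmheads) == 2:
            return bmheads[1] if curhead == bmheads[0] else bmheads[0]
        raise error.Abort('multiple or no matching bookmark heads - '
                          'please merge with an explicit rev or bookmark')
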
 
     if opts.get('preview'):
         # find nodes that are ancestors of p2 but not of p1
@@ -4913,7 +5048,7 @@
     if file_:
         m = scmutil.match(ctx, (file_,), opts)
         if m.anypats() or len(m.files()) != 1:
-            raise util.Abort(_('can only specify an explicit filename'))
+            raise error.Abort(_('can only specify an explicit filename'))
         file_ = m.files()[0]
         filenodes = []
         for cp in ctx.parents():
@@ -4924,7 +5059,7 @@
             except error.LookupError:
                 pass
         if not filenodes:
-            raise util.Abort(_("'%s' not found in manifest!") % file_)
+            raise error.Abort(_("'%s' not found in manifest!") % file_)
         p = []
         for fn in filenodes:
             fctx = repo.filectx(file_, fileid=fn)
@@ -5004,8 +5139,7 @@
 
         public < draft < secret
 
-    Returns 0 on success, 1 if no phases were changed or some could not
-    be changed.
+    Returns 0 on success, 1 if some phases could not be changed.
 
     (For more information about the phases concept, see :hg:`help phases`.)
     """
@@ -5014,7 +5148,7 @@
     for idx, name in enumerate(phases.phasenames):
         if opts[name]:
             if targetphase is not None:
-                raise util.Abort(_('only one phase can be specified'))
+                raise error.Abort(_('only one phase can be specified'))
             targetphase = idx
 
     # look for specified revision
@@ -5041,7 +5175,7 @@
             tr = repo.transaction("phase")
             # set phase
             if not revs:
-                raise util.Abort(_('empty revision set'))
+                raise error.Abort(_('empty revision set'))
             nodes = [repo[r].node() for r in revs]
             # moving revision from public to draft may hide them
             # We have to check result on an unfiltered repository
@@ -5074,17 +5208,20 @@
                 ui.note(msg)
         else:
             ui.warn(_('no phases changed\n'))
-            ret = 1
     return ret
 
 def postincoming(ui, repo, modheads, optupdate, checkout):
     if modheads == 0:
         return
     if optupdate:
-        checkout, movemarkfrom = bookmarks.calculateupdate(ui, repo, checkout)
         try:
+            brev = checkout
+            movemarkfrom = None
+            if not checkout:
+                updata = destutil.destupdate(repo)
+                checkout, movemarkfrom, brev = updata
             ret = hg.update(repo, checkout)
-        except util.Abort as inst:
+        except error.UpdateAbort as inst:
             ui.warn(_("not updating: %s\n") % str(inst))
             if inst.hint:
                 ui.warn(_("(%s)\n") % inst.hint)
@@ -5155,7 +5292,7 @@
             pullopargs['remotebookmarks'] = remotebookmarks
             for b in opts['bookmark']:
                 if b not in remotebookmarks:
-                    raise util.Abort(_('remote bookmark %s not found!') % b)
+                    raise error.Abort(_('remote bookmark %s not found!') % b)
                 revs.append(remotebookmarks[b])
 
         if revs:
@@ -5173,8 +5310,9 @@
             except error.CapabilityError:
                 err = _("other repository doesn't support revision lookup, "
                         "so a rev cannot be specified.")
-                raise util.Abort(err)
-
+                raise error.Abort(err)
+
+        pullopargs.update(opts.get('opargs', {}))
         modheads = exchange.pull(repo, other, heads=revs,
                                  force=opts.get('force'),
                                  bookmarks=opts.get('bookmark', ()),
@@ -5251,23 +5389,19 @@
                 # this lets simultaneous -r, -b options continue working
                 opts.setdefault('rev', []).append("null")
 
-    dest = ui.expandpath(dest or 'default-push', dest or 'default')
-    dest, branches = hg.parseurl(dest, opts.get('branch'))
+    path = ui.paths.getpath(dest, default='default')
+    if not path:
+        raise error.Abort(_('default repository not configured!'),
+                         hint=_('see the "path" section in "hg help config"'))
+    dest, branches = path.pushloc, (path.branch, opts.get('branch') or [])
     ui.status(_('pushing to %s\n') % util.hidepassword(dest))
     revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
-    try:
-        other = hg.peer(repo, opts, dest)
-    except error.RepoError:
-        if dest == "default-push":
-            raise util.Abort(_("default repository not configured!"),
-                    hint=_('see the "path" section in "hg help config"'))
-        else:
-            raise
+    other = hg.peer(repo, opts, dest)
 
     if revs:
         revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
         if not revs:
-            raise util.Abort(_("specified revisions evaluate to an empty set"),
+            raise error.Abort(_("specified revisions evaluate to an empty set"),
                              hint=_("use different revision arguments"))
 
     repo._subtoppath = dest
@@ -5283,7 +5417,8 @@
         del repo._subtoppath
     pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
                            newbranch=opts.get('new_branch'),
-                           bookmarks=opts.get('bookmark', ()))
+                           bookmarks=opts.get('bookmark', ()),
+                           opargs=opts.get('opargs'))
 
     result = not pushop.cgresult
 
@@ -5357,7 +5492,7 @@
 
     after, force = opts.get('after'), opts.get('force')
     if not pats and not after:
-        raise util.Abort(_('no files specified'))
+        raise error.Abort(_('no files specified'))
 
     m = scmutil.match(repo[None], pats, opts)
     subrepos = opts.get('subrepos')
@@ -5441,12 +5576,12 @@
         [opts.get(o) for o in 'all mark unmark list no_status'.split()]
 
     if (show and (mark or unmark)) or (mark and unmark):
-        raise util.Abort(_("too many options specified"))
+        raise error.Abort(_("too many options specified"))
     if pats and all:
-        raise util.Abort(_("can't specify --all and patterns"))
+        raise error.Abort(_("can't specify --all and patterns"))
     if not (all or pats or show or mark or unmark):
-        raise util.Abort(_('no files or directories specified'),
-                         hint=('use --all to remerge all files'))
+        raise error.Abort(_('no files or directories specified'),
+                         hint=('use --all to re-merge all unresolved files'))
 
     if show:
         fm = ui.formatter('resolve', opts)
@@ -5455,7 +5590,8 @@
         for f in ms:
             if not m(f):
                 continue
-            l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved'}[ms[f]]
+            l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved',
+                              'd': 'driverresolved'}[ms[f]]
             fm.startitem()
             fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
             fm.write('path', '%s\n', f, label=l)
@@ -5467,54 +5603,112 @@
         ms = mergemod.mergestate(repo)
 
         if not (ms.active() or repo.dirstate.p2() != nullid):
-            raise util.Abort(
+            raise error.Abort(
                 _('resolve command not applicable when not merging'))
 
-        m = scmutil.match(repo[None], pats, opts)
+        wctx = repo[None]
+
+        if ms.mergedriver and ms.mdstate() == 'u':
+            proceed = mergemod.driverpreprocess(repo, ms, wctx)
+            ms.commit()
+            # allow mark and unmark to go through
+            if not mark and not unmark and not proceed:
+                return 1
+
+        m = scmutil.match(wctx, pats, opts)
         ret = 0
         didwork = False
-
+        runconclude = False
+
+        tocomplete = []
         for f in ms:
             if not m(f):
                 continue
 
             didwork = True
 
+            # don't let driver-resolved files be marked, and run the conclude
+            # step if asked to resolve
+            if ms[f] == "d":
+                exact = m.exact(f)
+                if mark:
+                    if exact:
+                        ui.warn(_('not marking %s as it is driver-resolved\n')
+                                % f)
+                elif unmark:
+                    if exact:
+                        ui.warn(_('not unmarking %s as it is driver-resolved\n')
+                                % f)
+                else:
+                    runconclude = True
+                continue
+
             if mark:
                 ms.mark(f, "r")
             elif unmark:
                 ms.mark(f, "u")
             else:
-                wctx = repo[None]
-
                 # backup pre-resolve (merge uses .orig for its own purposes)
                 a = repo.wjoin(f)
                 util.copyfile(a, a + ".resolve")
 
                 try:
-                    # resolve file
+                    # preresolve file
                     ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                  'resolve')
-                    if ms.resolve(f, wctx):
+                    complete, r = ms.preresolve(f, wctx)
+                    if not complete:
+                        tocomplete.append(f)
+                    elif r:
                         ret = 1
                 finally:
                     ui.setconfig('ui', 'forcemerge', '', 'resolve')
                     ms.commit()
 
                 # replace filemerge's .orig file with our resolve file
+                # for files in tocomplete, ms.resolve will not overwrite
+                # .orig -- only preresolve does
                 util.rename(a + ".resolve", a + ".orig")
 
+        for f in tocomplete:
+            try:
+                # resolve file
+                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
+                             'resolve')
+                r = ms.resolve(f, wctx)
+                if r:
+                    ret = 1
+            finally:
+                ui.setconfig('ui', 'forcemerge', '', 'resolve')
+                ms.commit()
+
         ms.commit()
 
         if not didwork and pats:
             ui.warn(_("arguments do not match paths that need resolving\n"))
+        elif ms.mergedriver and ms.mdstate() != 's':
+            # run conclude step when either a driver-resolved file is requested
+            # or there are no driver-resolved files
+            # we can't use 'ret' to determine whether any files are unresolved
+            # because we might not have tried to resolve some
+            if ((runconclude or not list(ms.driverresolved()))
+                and not list(ms.unresolved())):
+                proceed = mergemod.driverconclude(repo, ms, wctx)
+                ms.commit()
+                if not proceed:
+                    return 1
 
     finally:
         wlock.release()
 
     # Nudge users into finishing an unfinished operation
-    if not list(ms.unresolved()):
+    unresolvedf = list(ms.unresolved())
+    driverresolvedf = list(ms.driverresolved())
+    if not unresolvedf and not driverresolvedf:
         ui.status(_('(no more unresolved files)\n'))
+    elif not unresolvedf:
+        ui.status(_('(no more unresolved files -- '
+                    'run "hg resolve --all" to conclude)\n'))
 
     return ret
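
Stripped of driver handling, .orig bookkeeping, and the per-file state commits, the new control flow above is a two-pass loop: preresolve handles the cheap cases and queues the rest, then resolve runs the full merge tool on whatever is left. A minimal sketch:

    def resolveall(ms, files, wctx):
        ret, tocomplete = 0, []
        for f in files:
            complete, r = ms.preresolve(f, wctx)  # trivial/premerge cases
            if not complete:
                tocomplete.append(f)              # needs the full merge tool
            elif r:
                ret = 1
        for f in tocomplete:
            if ms.resolve(f, wctx):               # second pass, may prompt
                ret = 1
        ms.commit()
        return ret
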
 
@@ -5555,18 +5749,21 @@
 
     See :hg:`help dates` for a list of formats valid for -d/--date.
 
+    See :hg:`help backout` for a way to reverse the effect of an
+    earlier changeset.
+
     Returns 0 on success.
     """
 
     if opts.get("date"):
         if opts.get("rev"):
-            raise util.Abort(_("you can't specify a revision and a date"))
+            raise error.Abort(_("you can't specify a revision and a date"))
         opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
 
     parent, p2 = repo.dirstate.parents()
     if not opts.get('rev') and p2 != nullid:
         # revert after merge is a trap for new users (issue2915)
-        raise util.Abort(_('uncommitted merge with no revision specified'),
+        raise error.Abort(_('uncommitted merge with no revision specified'),
                          hint=_('use "hg update" or see "hg help revert"'))
 
     ctx = scmutil.revsingle(repo, opts.get('rev'))
@@ -5577,7 +5774,7 @@
         if p2 != nullid:
             hint = _("uncommitted merge, use --all to discard all changes,"
                      " or 'hg update -C .' to abort the merge")
-            raise util.Abort(msg, hint=hint)
+            raise error.Abort(msg, hint=hint)
         dirty = any(repo.status())
         node = ctx.node()
         if node != parent:
@@ -5591,7 +5788,7 @@
             hint = _("uncommitted changes, use --all to discard all changes")
         else:
             hint = _("use --all to revert all files")
-        raise util.Abort(msg, hint=hint)
+        raise error.Abort(msg, hint=hint)
 
     return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
 
@@ -5703,7 +5900,7 @@
     """
 
     if opts["stdio"] and opts["cmdserver"]:
-        raise util.Abort(_("cannot use --stdio with --cmdserver"))
+        raise error.Abort(_("cannot use --stdio with --cmdserver"))
 
     if opts["stdio"]:
         if repo is None:
@@ -5713,6 +5910,7 @@
         s.serve_forever()
 
     if opts["cmdserver"]:
+        import commandserver
         service = commandserver.createservice(ui, repo, opts)
         return cmdutil.service(opts, initfn=service.init, runfn=service.run)
 
@@ -5871,7 +6069,7 @@
 
     if revs and change:
         msg = _('cannot specify --rev and --change at the same time')
-        raise util.Abort(msg)
+        raise error.Abort(msg)
     elif change:
         node2 = scmutil.revsingle(repo, change, None).node()
         node1 = repo[node2].p1().node()
@@ -6223,14 +6421,14 @@
         rev_ = "."
         names = [t.strip() for t in (name1,) + names]
         if len(names) != len(set(names)):
-            raise util.Abort(_('tag names must be unique'))
+            raise error.Abort(_('tag names must be unique'))
         for n in names:
             scmutil.checknewlabel(repo, n, 'tag')
             if not n:
-                raise util.Abort(_('tag names cannot consist entirely of '
+                raise error.Abort(_('tag names cannot consist entirely of '
                                    'whitespace'))
         if opts.get('rev') and opts.get('remove'):
-            raise util.Abort(_("--rev and --remove are incompatible"))
+            raise error.Abort(_("--rev and --remove are incompatible"))
         if opts.get('rev'):
             rev_ = opts['rev']
         message = opts.get('message')
@@ -6242,28 +6440,28 @@
 
             for n in names:
                 if not repo.tagtype(n):
-                    raise util.Abort(_("tag '%s' does not exist") % n)
+                    raise error.Abort(_("tag '%s' does not exist") % n)
                 if repo.tagtype(n) != expectedtype:
                     if expectedtype == 'global':
-                        raise util.Abort(_("tag '%s' is not a global tag") % n)
+                        raise error.Abort(_("tag '%s' is not a global tag") % n)
                     else:
-                        raise util.Abort(_("tag '%s' is not a local tag") % n)
-            rev_ = nullid
+                        raise error.Abort(_("tag '%s' is not a local tag") % n)
+            rev_ = 'null'
             if not message:
                 # we don't translate commit messages
                 message = 'Removed tag %s' % ', '.join(names)
         elif not opts.get('force'):
             for n in names:
                 if n in repo.tags():
-                    raise util.Abort(_("tag '%s' already exists "
+                    raise error.Abort(_("tag '%s' already exists "
                                        "(use -f to force)") % n)
         if not opts.get('local'):
             p1, p2 = repo.dirstate.parents()
             if p2 != nullid:
-                raise util.Abort(_('uncommitted merge'))
+                raise error.Abort(_('uncommitted merge'))
             bheads = repo.branchheads()
             if not opts.get('force') and bheads and p1 not in bheads:
-                raise util.Abort(_('not at a branch head (use -f to force)'))
+                raise error.Abort(_('not at a branch head (use -f to force)'))
         r = scmutil.revsingle(repo, rev_).node()
 
         if not message:
@@ -6284,7 +6482,7 @@
         # don't allow tagging the null rev
         if (not opts.get('remove') and
             scmutil.revsingle(repo, rev_).rev() == nullrev):
-            raise util.Abort(_("cannot tag null revision"))
+            raise error.Abort(_("cannot tag null revision"))
 
         repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
                  editor=editor)
@@ -6370,23 +6568,28 @@
             if isinstance(gen, bundle2.unbundle20):
                 tr = repo.transaction('unbundle')
                 try:
-                    op = bundle2.processbundle(repo, gen, lambda: tr)
+                    op = bundle2.applybundle(repo, gen, tr, source='unbundle',
+                                             url='bundle:' + fname)
                     tr.close()
-                except error.UnsupportedPartError as exc:
-                    raise util.Abort(_('%s: unknown bundle feature, %s')
+                except error.BundleUnknownFeatureError as exc:
+                    raise error.Abort(_('%s: unknown bundle feature, %s')
                                      % (fname, exc),
-                                     hint=_("see https://mercurial.selenic.com/"
+                                     hint=_("see https://mercurial-scm.org/"
                                             "wiki/BundleFeature for more "
                                             "information"))
                 finally:
                     if tr:
                         tr.release()
-                changes = [r.get('result', 0)
+                changes = [r.get('return', 0)
                            for r in op.records['changegroup']]
                 modheads = changegroup.combineresults(changes)
+            elif isinstance(gen, streamclone.streamcloneapplier):
+                raise error.Abort(
+                        _('packed bundles cannot be applied with '
+                          '"hg unbundle"'),
+                        hint=_('use "hg debugapplystreamclonebundle"'))
             else:
-                modheads = changegroup.addchangegroup(repo, gen, 'unbundle',
-                                                      'bundle:' + fname)
+                modheads = gen.apply(repo, 'unbundle', 'bundle:' + fname)
     finally:
         lock.release()
 
@@ -6451,8 +6654,9 @@
 
     Returns 0 on success, 1 if there are unresolved files.
     """
+    movemarkfrom = None
     if rev and node:
-        raise util.Abort(_("please specify just one revision"))
+        raise error.Abort(_("please specify just one revision"))
 
     if rev is None or rev == '':
         rev = node
@@ -6461,25 +6665,24 @@
     try:
         cmdutil.clearunfinished(repo)
 
-        # with no argument, we also move the active bookmark, if any
-        rev, movemarkfrom = bookmarks.calculateupdate(ui, repo, rev)
+        if date:
+            if rev is not None:
+                raise error.Abort(_("you can't specify a revision and a date"))
+            rev = cmdutil.finddate(ui, repo, date)
 
         # if we defined a bookmark, we have to remember the original name
         brev = rev
         rev = scmutil.revsingle(repo, rev, rev).rev()
 
         if check and clean:
-            raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
-
-        if date:
-            if rev is not None:
-                raise util.Abort(_("you can't specify a revision and a date"))
-            rev = cmdutil.finddate(ui, repo, date)
+            raise error.Abort(
+                _("cannot specify both -c/--check and -C/--clean"))
 
         if check:
             cmdutil.bailifchanged(repo, merge=False)
-            if rev is None:
-                rev = repo[repo[None].branch()].rev()
+        if rev is None:
+            updata = destutil.destupdate(repo, clean=clean, check=check)
+            rev, movemarkfrom, brev = updata
 
         repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
 
@@ -6489,7 +6692,9 @@
             ret = hg.update(repo, rev)
 
         if not ret and movemarkfrom:
-            if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
+            if movemarkfrom == repo['.'].node():
+                pass # no-op update
+            elif bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
                 ui.status(_("updating bookmark %s\n") % repo._activebookmark)
             else:
                 # this can happen with a non-linear update
@@ -6520,7 +6725,7 @@
     the changelog, manifest, and tracked files, as well as the
     integrity of their crosslinks and indices.
 
-    Please see http://mercurial.selenic.com/wiki/RepositoryCorruption
+    Please see https://mercurial-scm.org/wiki/RepositoryCorruption
     for more information about recovery from corruption of the
     repository.
 
@@ -6534,7 +6739,7 @@
     ui.write(_("Mercurial Distributed SCM (version %s)\n")
              % util.version())
     ui.status(_(
-        "(see http://mercurial.selenic.com for more information)\n"
+        "(see https://mercurial-scm.org for more information)\n"
         "\nCopyright (C) 2005-2015 Matt Mackall and others\n"
         "This is free software; see the source for copying conditions. "
         "There is NO\nwarranty; "
--- a/mercurial/commandserver.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/commandserver.py	Tue Oct 20 15:59:10 2015 -0500
@@ -8,7 +8,7 @@
 from i18n import _
 import struct
 import sys, os, errno, traceback, SocketServer
-import dispatch, encoding, util
+import dispatch, encoding, util, error
 
 logfile = None
 
@@ -222,7 +222,7 @@
             else:
                 # clients are expected to check what commands are supported by
                 # looking at the servers capabilities
-                raise util.Abort(_('unknown command %s') % cmd)
+                raise error.Abort(_('unknown command %s') % cmd)
 
         return cmd != ''
 
@@ -301,7 +301,7 @@
                 sv.serve()
             # handle exceptions that may be raised by command server. most of
             # known exceptions are caught by dispatch.
-            except util.Abort as inst:
+            except error.Abort as inst:
                 ui.warn(_('abort: %s\n') % inst)
             except IOError as inst:
                 if inst.errno != errno.EPIPE:
@@ -323,9 +323,9 @@
         self.repo = repo
         self.address = opts['address']
         if not util.safehasattr(SocketServer, 'UnixStreamServer'):
-            raise util.Abort(_('unsupported platform'))
+            raise error.Abort(_('unsupported platform'))
         if not self.address:
-            raise util.Abort(_('no socket path specified with --address'))
+            raise error.Abort(_('no socket path specified with --address'))
 
     def init(self):
         class cls(SocketServer.ForkingMixIn, SocketServer.UnixStreamServer):
@@ -351,4 +351,4 @@
     try:
         return _servicemap[mode](ui, repo, opts)
     except KeyError:
-        raise util.Abort(_('unknown mode %s') % mode)
+        raise error.Abort(_('unknown mode %s') % mode)
--- a/mercurial/config.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/config.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,9 +5,16 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from i18n import _
-import error, util
-import os, errno
+from __future__ import absolute_import
+
+import errno
+import os
+
+from .i18n import _
+from . import (
+    error,
+    util,
+)
 
 class config(object):
     def __init__(self, data=None, includepaths=[]):
--- a/mercurial/context.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/context.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,6 +5,8 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+import re
+
 from node import nullid, nullrev, wdirid, short, hex, bin
 from i18n import _
 import mdiff, error, util, scmutil, subrepo, patch, encoding, phases
@@ -22,6 +24,8 @@
 # dirty in the working copy.
 _newnode = '!' * 21
 
+nonascii = re.compile(r'[^\x21-\x7f]').search
+
 class basectx(object):
     """A basectx object represents the common logic for its children:
     changectx: read-only context that is already present in the repo,
@@ -466,7 +470,7 @@
                 msg = _("working directory has unknown parent '%s'!")
                 raise error.Abort(msg % short(changeid))
             try:
-                if len(changeid) == 20:
+                if len(changeid) == 20 and nonascii(changeid):
                     changeid = hex(changeid)
             except TypeError:
                 pass
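
The extra nonascii() guard above keeps 20-character textual identifiers (tags, bookmarks, hex prefixes) from being mistaken for binary nodeids; a standalone Python 2 sketch of the check:

    import re
    from binascii import hexlify

    nonascii = re.compile(r'[^\x21-\x7f]').search

    def normalizechangeid(changeid):
        # only a 20-byte string containing non-printable bytes is treated
        # as a binary node and converted to its 40-character hex form
        if len(changeid) == 20 and nonascii(changeid):
            return hexlify(changeid)
        return changeid

    # normalizechangeid('a' * 20) == 'a' * 20      (left alone)
    # normalizechangeid('\x12' * 20) == '12' * 20  (hexlified)
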
@@ -1060,7 +1064,7 @@
         except error.CensoredNodeError:
             if self._repo.ui.config("censor", "policy", "abort") == "ignore":
                 return ""
-            raise util.Abort(_("censored node: %s") % short(self._filenode),
+            raise error.Abort(_("censored node: %s") % short(self._filenode),
                              hint=_("set censor.policy to ignore errors"))
 
     def size(self):
@@ -1120,7 +1124,7 @@
             try:
                 branch = encoding.fromlocal(self._repo.dirstate.branch())
             except UnicodeDecodeError:
-                raise util.Abort(_('branch name not in UTF-8!'))
+                raise error.Abort(_('branch name not in UTF-8!'))
             self._extra['branch'] = branch
         if self._extra['branch'] == '':
             self._extra['branch'] = 'default'
@@ -1320,7 +1324,7 @@
         # write changes out explicitly, because nesting wlock at
         # runtime may prevent 'wlock.release()' in 'repo.commit()'
         # from immediately doing so for subsequent changing files
-        self._repo.dirstate.write()
+        self._repo.dirstate.write(self._repo.currenttransaction())
 
 class workingctx(committablectx):
     """A workingctx object makes access to data related to
@@ -1526,7 +1530,7 @@
                     # write changes out explicitly, because nesting
                     # wlock at runtime may prevent 'wlock.release()'
                     # below from doing so for subsequent changing files
-                    self._repo.dirstate.write()
+                    self._repo.dirstate.write(self._repo.currenttransaction())
                 finally:
                     wlock.release()
             except error.LockError:
@@ -1690,7 +1694,7 @@
     def date(self):
         t, tz = self._changectx.date()
         try:
-            return (int(self._repo.wvfs.lstat(self._path).st_mtime), tz)
+            return (util.statmtimesec(self._repo.wvfs.lstat(self._path)), tz)
         except OSError as err:
             if err.errno != errno.ENOENT:
                 raise
--- a/mercurial/copies.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/copies.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,9 +5,15 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import util, pathutil
+from __future__ import absolute_import
+
 import heapq
 
+from . import (
+    pathutil,
+    util,
+)
+
 def _findlimit(repo, a, b):
     """
     Find the last revision that needs to be checked to ensure that a full
@@ -185,6 +191,9 @@
     return cm
 
 def _backwardrenames(a, b):
+    if a._repo.ui.configbool('experimental', 'disablecopytrace'):
+        return {}
+
     # Even though we're not taking copies into account, 1:n rename situations
     # can still exist (e.g. hg cp a b; hg mv a c). In those cases we
     # arbitrarily pick one of the renames.
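
The new 'experimental.disablecopytrace' knob checked above short-circuits copy tracing entirely (it can be flipped per invocation with --config experimental.disablecopytrace=true). The configbool call below is the one used in this changeset; the wrapper around it is illustration only.

    def maybefindcopies(repo, findcopies, *args):
        # skip the potentially expensive rename/copy search when disabled
        if repo.ui.configbool('experimental', 'disablecopytrace'):
            return {}
        return findcopies(*args)
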
@@ -228,6 +237,41 @@
                       % "\n   ".join(u2))
     return u1, u2
 
+def _makegetfctx(ctx):
+    """return a 'getfctx' function suitable for checkcopies usage
+
+    We have to re-setup the function building 'filectx' for each
+    'checkcopies' to ensure the linkrev adjustment is properly set up for
+    each. Linkrev adjustment is important to avoid bugs in rename
+    detection. Moreover, having a proper '_ancestrycontext' setup ensures
+    the performance impact of this adjustment is kept limited. Without it,
+    each file could do a full dag traversal making the time complexity of
+    the operation explode (see issue4537).
+
+    This function exists here mostly to limit the impact on stable. Feel
+    free to refactor on default.
+    """
+    rev = ctx.rev()
+    repo = ctx._repo
+    ac = getattr(ctx, '_ancestrycontext', None)
+    if ac is None:
+        revs = [rev]
+        if rev is None:
+            revs = [p.rev() for p in ctx.parents()]
+        ac = repo.changelog.ancestors(revs, inclusive=True)
+        ctx._ancestrycontext = ac
+    def makectx(f, n):
+        if len(n) != 20:  # in a working context?
+            if ctx.rev() is None:
+                return ctx.filectx(f)
+            return repo[None][f]
+        fctx = repo.filectx(f, fileid=n)
+        # setup only needed for a filectx not created from a changectx
+        fctx._ancestrycontext = ac
+        fctx._descendantrev = rev
+        return fctx
+    return util.lrucachefunc(makectx)
+
 def mergecopies(repo, c1, c2, ca):
     """
     Find moves and copies between context c1 and c2 that are relevant
@@ -258,71 +302,46 @@
     if c2.node() is None and c1.node() == repo.dirstate.p1():
         return repo.dirstate.copies(), {}, {}, {}
 
+    # Copy trace disabling is explicitly below the node == p1 logic above
+    # because the logic above is required for a simple copy to be kept across a
+    # rebase.
+    if repo.ui.configbool('experimental', 'disablecopytrace'):
+        return {}, {}, {}, {}
+
     limit = _findlimit(repo, c1.rev(), c2.rev())
     if limit is None:
         # no common ancestor, no copies
         return {}, {}, {}, {}
+    repo.ui.debug("  searching for copies back to rev %d\n" % limit)
+
     m1 = c1.manifest()
     m2 = c2.manifest()
     ma = ca.manifest()
 
-
-    def setupctx(ctx):
-        """return a 'makectx' function suitable for checkcopies usage from ctx
-
-        We have to re-setup the function building 'filectx' for each
-        'checkcopies' to ensure the linkrev adjustement is properly setup for
-        each. Linkrev adjustment is important to avoid bug in rename
-        detection. Moreover, having a proper '_ancestrycontext' setup ensures
-        the performance impact of this adjustment is kept limited. Without it,
-        each file could do a full dag traversal making the time complexity of
-        the operation explode (see issue4537).
-
-        This function exists here mostly to limit the impact on stable. Feel
-        free to refactor on default.
-        """
-        rev = ctx.rev()
-        ac = getattr(ctx, '_ancestrycontext', None)
-        if ac is None:
-            revs = [rev]
-            if rev is None:
-                revs = [p.rev() for p in ctx.parents()]
-            ac = ctx._repo.changelog.ancestors(revs, inclusive=True)
-            ctx._ancestrycontext = ac
-        def makectx(f, n):
-            if len(n) != 20:  # in a working context?
-                if c1.rev() is None:
-                    return c1.filectx(f)
-                return c2.filectx(f)
-            fctx = repo.filectx(f, fileid=n)
-            # setup only needed for filectx not create from a changectx
-            fctx._ancestrycontext = ac
-            fctx._descendantrev = rev
-            return fctx
-        return util.lrucachefunc(makectx)
-
-    copy = {}
-    movewithdir = {}
-    fullcopy = {}
+    copy1, copy2 = {}, {}
+    movewithdir1, movewithdir2 = {}, {}
+    fullcopy1, fullcopy2 = {}, {}
     diverge = {}
 
-    repo.ui.debug("  searching for copies back to rev %d\n" % limit)
-
+    # find interesting file sets from manifests
     addedinm1 = m1.filesnotin(ma)
     addedinm2 = m2.filesnotin(ma)
     u1, u2 = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2)
+    bothnew = sorted(addedinm1 & addedinm2)
 
     for f in u1:
-        ctx = setupctx(c1)
-        checkcopies(ctx, f, m1, m2, ca, limit, diverge, copy, fullcopy)
+        checkcopies(c1, f, m1, m2, ca, limit, diverge, copy1, fullcopy1)
 
     for f in u2:
-        ctx = setupctx(c2)
-        checkcopies(ctx, f, m2, m1, ca, limit, diverge, copy, fullcopy)
+        checkcopies(c2, f, m2, m1, ca, limit, diverge, copy2, fullcopy2)
+
+    copy = dict(copy1.items() + copy2.items())
+    movewithdir = dict(movewithdir1.items() + movewithdir2.items())
+    fullcopy = dict(fullcopy1.items() + fullcopy2.items())
 
     renamedelete = {}
-    renamedelete2 = set()
-    diverge2 = set()
+    renamedeleteset = set()
+    divergeset = set()
     for of, fl in diverge.items():
         if len(fl) == 1 or of in c1 or of in c2:
             del diverge[of] # not actually divergent, or not a rename
@@ -330,20 +349,17 @@
                 # renamed on one side, deleted on the other side, but filter
                 # out files that have been renamed and then deleted
                 renamedelete[of] = [f for f in fl if f in c1 or f in c2]
-                renamedelete2.update(fl) # reverse map for below
+                renamedeleteset.update(fl) # reverse map for below
         else:
-            diverge2.update(fl) # reverse map for below
+            divergeset.update(fl) # reverse map for below
 
-    bothnew = sorted(addedinm1 & addedinm2)
     if bothnew:
         repo.ui.debug("  unmatched files new in both:\n   %s\n"
                       % "\n   ".join(bothnew))
     bothdiverge, _copy, _fullcopy = {}, {}, {}
     for f in bothnew:
-        ctx = setupctx(c1)
-        checkcopies(ctx, f, m1, m2, ca, limit, bothdiverge, _copy, _fullcopy)
-        ctx = setupctx(c2)
-        checkcopies(ctx, f, m2, m1, ca, limit, bothdiverge, _copy, _fullcopy)
+        checkcopies(c1, f, m1, m2, ca, limit, bothdiverge, _copy, _fullcopy)
+        checkcopies(c2, f, m2, m1, ca, limit, bothdiverge, _copy, _fullcopy)
     for of, fl in bothdiverge.items():
         if len(fl) == 2 and fl[0] == fl[1]:
             copy[fl[0]] = of # not actually divergent, just matching renames
@@ -355,13 +371,13 @@
             note = ""
             if f in copy:
                 note += "*"
-            if f in diverge2:
+            if f in divergeset:
                 note += "!"
-            if f in renamedelete2:
+            if f in renamedeleteset:
                 note += "%"
             repo.ui.debug("   src: '%s' -> dst: '%s' %s\n" % (fullcopy[f], f,
                                                               note))
-    del diverge2
+    del divergeset
 
     if not fullcopy:
         return copy, movewithdir, diverge, renamedelete
@@ -427,7 +443,7 @@
     """
     check possible copies of f from m1 to m2
 
-    ctx = function accepting (filename, node) that returns a filectx.
+    ctx = starting context for f in m1
     f = the filename to check
     m1 = the source manifest
     m2 = the destination manifest
@@ -439,6 +455,7 @@
     """
 
     ma = ca.manifest()
+    getfctx = _makegetfctx(ctx)
 
     def _related(f1, f2, limit):
         # Walk back to common ancestor to see if the two files originate
@@ -473,7 +490,7 @@
 
     of = None
     seen = set([f])
-    for oc in ctx(f, m1[f]).ancestors():
+    for oc in getfctx(f, m1[f]).ancestors():
         ocr = oc.linkrev()
         of = oc.path()
         if of in seen:
@@ -488,7 +505,7 @@
             continue # no match, keep looking
         if m2[of] == ma.get(of):
             break # no merge needed, quit early
-        c2 = ctx(of, m2[of])
+        c2 = getfctx(of, m2[of])
         cr = _related(oc, c2, ca.rev())
         if cr and (of == f or of == c2.path()): # non-divergent
             copy[f] = of
@@ -507,7 +524,12 @@
     copies between fromrev and rev.
     '''
     exclude = {}
-    if skiprev is not None:
+    if (skiprev is not None and
+        not repo.ui.configbool('experimental', 'disablecopytrace')):
+        # disablecopytrace skips this line, but not the entire function because
+        # the line below is O(size of the repo) during a rebase, while the rest
+        # of the function is much faster (and is required for carrying copy
+        # metadata across the rebase anyway).
         exclude = pathcopies(repo[fromrev], repo[skiprev])
     for dst, src in pathcopies(repo[fromrev], repo[rev]).iteritems():
         # copies.pathcopies returns backward renames, so dst might not
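The _makegetfctx() helper above wraps makectx() in util.lrucachefunc so that repeated (filename, node) lookups during checkcopies() reuse the filectx that was already built. The real cache lives in mercurial/util.py and may behave differently (size limit, eviction); the sketch below only illustrates the memoization idea with a plain dictionary and made-up names:

    def memoize(func):
        cache = {}
        def wrapped(*args):
            if args not in cache:
                cache[args] = func(*args)
            return cache[args]
        return wrapped

    @memoize
    def makectx(f, n):
        print('building filectx for %s at %s' % (f, n))  # runs once per key
        return (f, n)

    makectx('a.txt', 'deadbeef')   # builds and caches
    makectx('a.txt', 'deadbeef')   # served from the cache, nothing rebuilt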
--- a/mercurial/crecord.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/crecord.py	Tue Oct 20 15:59:10 2015 -0500
@@ -8,11 +8,23 @@
 # This code is based on the Mark Edgington's crecord extension.
 # (Itself based on Bryan O'Sullivan's record extension.)
 
-from i18n import _
-import patch as patchmod
-import util, encoding
+from __future__ import absolute_import
 
-import os, re, sys, struct, signal, tempfile, locale, cStringIO
+import cStringIO
+import locale
+import os
+import re
+import signal
+import struct
+import sys
+import tempfile
+
+from .i18n import _
+from . import (
+    encoding,
+    error,
+    patch as patchmod,
+)
 
 # This is required for ncurses to display non-ASCII characters in default user
 # locale encoding correctly.  --immerrr
@@ -21,7 +33,8 @@
 # os.name is one of: 'posix', 'nt', 'dos', 'os2', 'mac', or 'ce'
 if os.name == 'posix':
     import curses
-    import fcntl, termios
+    import fcntl
+    import termios
 else:
     # I have no idea if wcurses works with crecord...
     try:
@@ -34,7 +47,7 @@
     curses
 except NameError:
     if os.name != 'nt':  # Temporary hack to get running on Windows again
-        raise util.Abort(
+        raise error.Abort(
             _('the python curses/wcurses module is not available/installed'))
 
 _origstdout = sys.__stdout__ # used by gethw()
@@ -182,7 +195,7 @@
 class uiheader(patchnode):
     """patch header
 
-    xxx shoudn't we move this to mercurial/patch.py ?
+    xxx shouldn't we move this to mercurial/patch.py ?
     """
 
     def __init__(self, header):
@@ -485,7 +498,7 @@
     f = signal.getsignal(signal.SIGTSTP)
     curses.wrapper(chunkselector.main)
     if chunkselector.initerr is not None:
-        raise util.Abort(chunkselector.initerr)
+        raise error.Abort(chunkselector.initerr)
     # ncurses does not restore signal handler for SIGTSTP
     signal.signal(signal.SIGTSTP, f)
 
@@ -1421,7 +1434,7 @@
         """
         once we scrolled with pg up pg down we can be pointing outside of the
         display zone. we print the patch with towin=False to compute the
-        location of the selected item eventhough it is outside of the displayed
+        location of the selected item even though it is outside of the displayed
         zone and then update the scroll.
         """
         self.printitem(towin=False)
@@ -1429,7 +1442,7 @@
 
     def toggleedit(self, item=None, test=False):
         """
-            edit the currently chelected chunk
+            edit the currently selected chunk
         """
         def updateui(self):
             self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1
@@ -1449,7 +1462,7 @@
                 self.ui.write("\n")
                 return None
             # patch comment based on the git one (based on comment at end of
-            # http://mercurial.selenic.com/wiki/recordextension)
+            # https://mercurial-scm.org/wiki/recordextension)
             phelp = '---' + _("""
     to remove '-' lines, make them ' ' lines (context).
     to remove '+' lines, delete them.
@@ -1553,7 +1566,7 @@
         elif keypressed in ["H", "KEY_SLEFT"]:
             self.leftarrowshiftevent()
         elif keypressed in ["q"]:
-            raise util.Abort(_('user quit'))
+            raise error.Abort(_('user quit'))
         elif keypressed in ["c"]:
             if self.confirmcommit():
                 return True
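crecord still aborts early when neither curses nor wcurses can be imported, now via error.Abort instead of util.Abort. A generic sketch of that optional-import-then-abort pattern, with a local Abort class standing in for mercurial.error.Abort:

    class Abort(Exception):
        """stand-in for mercurial.error.Abort"""

    try:
        import curses
    except ImportError:
        curses = None

    if curses is None:
        raise Abort('the python curses/wcurses module is not '
                    'available/installed')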
--- a/mercurial/dagparser.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/dagparser.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,9 +5,13 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import re, string
-import util
-from i18n import _
+from __future__ import absolute_import
+
+import re
+import string
+
+from .i18n import _
+from . import error
 
 def parsedag(desc):
     '''parses a DAG from a concise textual description; generates events
@@ -265,7 +269,7 @@
                 s += c
                 i += 1
                 c = nextch()
-            raise util.Abort(_('invalid character in dag description: '
+            raise error.Abort(_('invalid character in dag description: '
                                '%s...') % s)
 
 def dagtextlines(events,
@@ -294,13 +298,13 @@
 
                 # sanity check
                 if r != wantr:
-                    raise util.Abort(_("expected id %i, got %i") % (wantr, r))
+                    raise error.Abort(_("expected id %i, got %i") % (wantr, r))
                 if not ps:
                     ps = [-1]
                 else:
                     for p in ps:
                         if p >= r:
-                            raise util.Abort(_("parent id %i is larger than "
+                            raise error.Abort(_("parent id %i is larger than "
                                                "current id %i") % (p, r))
                 wantr += 1
 
@@ -362,7 +366,7 @@
                     yield '#' + data
                     yield '\n'
                 else:
-                    raise util.Abort(_("invalid event type in dag: %s")
+                    raise error.Abort(_("invalid event type in dag: %s")
                                      % str((type, data)))
         if run:
             yield '+' + str(run)
--- a/mercurial/dagutil.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/dagutil.py	Tue Oct 20 15:59:10 2015 -0500
@@ -6,9 +6,10 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from node import nullrev
-from i18n import _
+from __future__ import absolute_import
 
+from .i18n import _
+from .node import nullrev
 
 class basedag(object):
     '''generic interface for DAGs
--- a/mercurial/default.d/mergetools.rc	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/default.d/mergetools.rc	Tue Oct 20 15:59:10 2015 -0500
@@ -88,6 +88,10 @@
 filemerge.args=-left $other -right $local -ancestor $base -merge $output
 filemerge.gui=True
 
+filemergexcode.executable=/Applications/Xcode.app/Contents/Applications/FileMerge.app/Contents/MacOS/FileMerge
+filemergexcode.args=-left $other -right $local -ancestor $base -merge $output
+filemergexcode.gui=True
+
 ; Windows version of Beyond Compare
 beyondcompare3.args=$local $other $base $output /ro /lefttitle=local /centertitle=base /righttitle=other /automerge /reviewconflicts /solo
 beyondcompare3.regkey=Software\Scooter Software\Beyond Compare 3
--- a/mercurial/demandimport.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/demandimport.py	Tue Oct 20 15:59:10 2015 -0500
@@ -24,8 +24,11 @@
   b = __import__(a)
 '''
 
-import os, sys
-from contextlib import contextmanager
+from __future__ import absolute_import
+
+import contextlib
+import os
+import sys
 
 # __builtin__ in Python 2, builtins in Python 3.
 try:
@@ -33,26 +36,21 @@
 except ImportError:
     import builtins
 
+contextmanager = contextlib.contextmanager
+
 _origimport = __import__
 
 nothing = object()
 
-try:
-    # Python 3 doesn't have relative imports nor level -1.
-    level = -1
-    if sys.version_info[0] >= 3:
-        level = 0
-    _origimport(builtins.__name__, {}, {}, None, level)
-except TypeError: # no level argument
-    def _import(name, globals, locals, fromlist, level):
-        "call _origimport with no level argument"
-        return _origimport(name, globals, locals, fromlist)
-else:
-    _import = _origimport
+# Python 3 doesn't have relative imports nor level -1.
+level = -1
+if sys.version_info[0] >= 3:
+    level = 0
+_import = _origimport
 
-def _hgextimport(importfunc, name, globals, *args):
+def _hgextimport(importfunc, name, globals, *args, **kwargs):
     try:
-        return importfunc(name, globals, *args)
+        return importfunc(name, globals, *args, **kwargs)
     except ImportError:
         if not globals:
             raise
@@ -63,7 +61,7 @@
         if nameroot != contextroot:
             raise
         # retry to import with "hgext_" prefix
-        return importfunc(hgextname, globals, *args)
+        return importfunc(hgextname, globals, *args, **kwargs)
 
 class _demandmod(object):
     """module demand-loader and proxy"""
@@ -75,14 +73,26 @@
             head = name
             after = []
         object.__setattr__(self, "_data",
-                           (head, globals, locals, after, level))
+                           (head, globals, locals, after, level, set()))
         object.__setattr__(self, "_module", None)
     def _extend(self, name):
         """add to the list of submodules to load"""
         self._data[3].append(name)
+
+    def _addref(self, name):
+        """Record that the named module ``name`` imports this module.
+
+        References to this proxy class having the name of this module will be
+        replaced at module load time. We assume the symbol inside the importing
+        module is identical to the "head" name of this module. We don't
+        actually know if "as X" syntax is being used to change the symbol name
+        because this information isn't exposed to __import__.
+        """
+        self._data[5].add(name)
+
     def _load(self):
         if not self._module:
-            head, globals, locals, after, level = self._data
+            head, globals, locals, after, level, modrefs = self._data
             mod = _hgextimport(_import, head, globals, locals, None, level)
             # load submodules
             def subload(mod, p):
@@ -97,9 +107,15 @@
             for x in after:
                 subload(mod, x)
 
-            # are we in the locals dictionary still?
+            # Replace references to this proxy instance with the actual module.
             if locals and locals.get(head) == self:
                 locals[head] = mod
+
+            for modname in modrefs:
+                modref = sys.modules.get(modname, None)
+                if modref and getattr(modref, head, None) == self:
+                    setattr(modref, head, mod)
+
             object.__setattr__(self, "_module", mod)
 
     def __repr__(self):
@@ -109,7 +125,7 @@
     def __call__(self, *args, **kwargs):
         raise TypeError("%s object is not callable" % repr(self))
     def __getattribute__(self, attr):
-        if attr in ('_data', '_extend', '_load', '_module'):
+        if attr in ('_data', '_extend', '_load', '_module', '_addref'):
             return object.__getattribute__(self, attr)
         self._load()
         return getattr(self._module, attr)
@@ -135,23 +151,70 @@
                 return locals[base]
         return _demandmod(name, globals, locals, level)
     else:
-        if level != -1:
-            # from . import b,c,d or from .a import b,c,d
-            return _origimport(name, globals, locals, fromlist, level)
+        # There is a fromlist.
         # from a import b,c,d
+        # from . import b,c,d
+        # from .a import b,c,d
+
+        # level == -1: relative and absolute attempted (Python 2 only).
+        # level >= 0: absolute only (Python 2 w/ absolute_import and Python 3).
+        # The modern Mercurial convention is to use absolute_import everywhere,
+        # so modern Mercurial code will have level >= 0.
+
+        # The name of the module the import statement is located in.
+        globalname = globals.get('__name__')
+
+        def processfromitem(mod, attr, **kwargs):
+            """Process an imported symbol in the import statement.
+
+            If the symbol doesn't exist in the parent module, it must be a
+            module. We set missing modules up as _demandmod instances.
+            """
+            symbol = getattr(mod, attr, nothing)
+            if symbol is nothing:
+                symbol = _demandmod(attr, mod.__dict__, locals, **kwargs)
+                setattr(mod, attr, symbol)
+
+            # Record the importing module references this symbol so we can
+            # replace the symbol with the actual module instance at load
+            # time.
+            if globalname and isinstance(symbol, _demandmod):
+                symbol._addref(globalname)
+
+        if level >= 0:
+            # Mercurial's enforced import style does not use
+            # "from a import b,c,d" or "from .a import b,c,d" syntax. In
+            # addition, this appears to be giving errors with some modules
+            # for unknown reasons. Since we shouldn't be using this syntax
+            # much, work around the problems.
+            if name:
+                return _hgextimport(_origimport, name, globals, locals,
+                                    fromlist, level)
+
+            mod = _hgextimport(_origimport, name, globals, locals, level=level)
+
+            for x in fromlist:
+                processfromitem(mod, x, level=level)
+
+            return mod
+
+        # But, we still need to support lazy loading of standard library and 3rd
+        # party modules. So handle level == -1.
         mod = _hgextimport(_origimport, name, globals, locals)
         # recurse down the module chain
         for comp in name.split('.')[1:]:
             if getattr(mod, comp, nothing) is nothing:
-                setattr(mod, comp, _demandmod(comp, mod.__dict__, mod.__dict__))
+                setattr(mod, comp,
+                        _demandmod(comp, mod.__dict__, mod.__dict__))
             mod = getattr(mod, comp)
+
         for x in fromlist:
-            # set requested submodules for demand load
-            if getattr(mod, x, nothing) is nothing:
-                setattr(mod, x, _demandmod(x, mod.__dict__, locals))
+            processfromitem(mod, x)
+
         return mod
 
 ignore = [
+    '__future__',
     '_hashlib',
     '_xmlplus',
     'fcntl',
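The demandimport changes keep modules as _demandmod proxies until an attribute is actually needed, and the new _addref() bookkeeping lets the proxy patch itself out of every importing module once the real module loads. The class below is a much smaller illustration of just the lazy-loading half (no submodule handling, no back-references); LazyModule is an invented name:

    import importlib

    class LazyModule(object):
        def __init__(self, name):
            object.__setattr__(self, '_name', name)
            object.__setattr__(self, '_module', None)

        def _load(self):
            # import for real on first use only
            if self._module is None:
                object.__setattr__(self, '_module',
                                   importlib.import_module(self._name))
            return self._module

        def __getattr__(self, attr):
            return getattr(self._load(), attr)

    json = LazyModule('json')            # nothing imported yet
    print(json.dumps({'lazy': True}))    # first attribute access imports json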
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/destutil.py	Tue Oct 20 15:59:10 2015 -0500
@@ -0,0 +1,200 @@
+# destutil.py - Mercurial utility function for command destination
+#
+#  Copyright Matt Mackall <mpm@selenic.com> and others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from .i18n import _
+from . import (
+    bookmarks,
+    error,
+    obsolete,
+)
+
+def _destupdatevalidate(repo, rev, clean, check):
+    """validate that the destination comply to various rules
+
+    This exists as its own function to help wrapping from extensions."""
+    wc = repo[None]
+    p1 = wc.p1()
+    if not clean:
+        # Check that the update is linear.
+        #
+        # Mercurial does not allow update-merge for non-linear patterns
+        # (that would be technically possible but was considered too confusing
+        # for users a long time ago)
+        #
+        # See mercurial.merge.update for details
+        if p1.rev() not in repo.changelog.ancestors([rev], inclusive=True):
+            dirty = wc.dirty(missing=True)
+            foreground = obsolete.foreground(repo, [p1.node()])
+            if not repo[rev].node() in foreground:
+                if dirty:
+                    msg = _("uncommitted changes")
+                    hint = _("commit and merge, or update --clean to"
+                             " discard changes")
+                    raise error.UpdateAbort(msg, hint=hint)
+                elif not check:  # destination is not a descendant.
+                    msg = _("not a linear update")
+                    hint = _("merge or update --check to force update")
+                    raise error.UpdateAbort(msg, hint=hint)
+
+def _destupdateobs(repo, clean, check):
+    """decide of an update destination from obsolescence markers"""
+    node = None
+    wc = repo[None]
+    p1 = wc.p1()
+    movemark = None
+
+    if p1.obsolete() and not p1.children():
+        # allow updating to successors
+        successors = obsolete.successorssets(repo, p1.node())
+
+        # behavior of certain cases is as follows,
+        #
+        # divergent changesets: update to highest rev, similar to what
+        #     is currently done when there are more than one head
+        #     (i.e. 'tip')
+        #
+        # replaced changesets: same as divergent except we know there
+        # is no conflict
+        #
+        # pruned changeset: no update is done; though, we could
+        #     consider updating to the first non-obsolete parent,
+        #     similar to what is currently done for 'hg prune'
+
+        if successors:
+            # flattening the list here handles both divergent (len > 1)
+            # and the usual case (len = 1)
+            successors = [n for sub in successors for n in sub]
+
+            # get the max revision for the given successors set,
+            # i.e. the 'tip' of a set
+            node = repo.revs('max(%ln)', successors).first()
+            if bookmarks.isactivewdirparent(repo):
+                movemark = repo['.'].node()
+    return node, movemark, None
+
+def _destupdatebook(repo, clean, check):
+    """decide on an update destination from active bookmark"""
+    # we also move the active bookmark, if any
+    activemark = None
+    node, movemark = bookmarks.calculateupdate(repo.ui, repo, None)
+    if node is not None:
+        activemark = node
+    return node, movemark, activemark
+
+def _destupdatebranch(repo, clean, check):
+    """decide on an update destination from current branch"""
+    wc = repo[None]
+    movemark = node = None
+    try:
+        node = repo.branchtip(wc.branch())
+        if bookmarks.isactivewdirparent(repo):
+            movemark = repo['.'].node()
+    except error.RepoLookupError:
+        if wc.branch() == 'default': # no default branch!
+            node = repo.lookup('tip') # update to tip
+        else:
+            raise error.Abort(_("branch %s not found") % wc.branch())
+    return node, movemark, None
+
+# order in which each step should be evaluated
+# steps are run until one finds a destination
+destupdatesteps = ['evolution', 'bookmark', 'branch']
+# mapping to ease extension overriding steps.
+destupdatestepmap = {'evolution': _destupdateobs,
+                     'bookmark': _destupdatebook,
+                     'branch': _destupdatebranch,
+                     }
+
+def destupdate(repo, clean=False, check=False):
+    """destination for bare update operation
+
+    return (rev, movemark, activemark)
+
+    - rev: the revision to update to,
+    - movemark: node to move the active bookmark from
+                (cf. bookmarks.calculateupdate),
+    - activemark: a bookmark to activate at the end of the update.
+    """
+    node = movemark = activemark = None
+
+    for step in destupdatesteps:
+        node, movemark, activemark = destupdatestepmap[step](repo, clean, check)
+        if node is not None:
+            break
+    rev = repo[node].rev()
+
+    _destupdatevalidate(repo, rev, clean, check)
+
+    return rev, movemark, activemark
+
+def _destmergebook(repo):
+    """find merge destination in the active bookmark case"""
+    node = None
+    bmheads = repo.bookmarkheads(repo._activebookmark)
+    curhead = repo[repo._activebookmark].node()
+    if len(bmheads) == 2:
+        if curhead == bmheads[0]:
+            node = bmheads[1]
+        else:
+            node = bmheads[0]
+    elif len(bmheads) > 2:
+        raise error.Abort(_("multiple matching bookmarks to merge - "
+            "please merge with an explicit rev or bookmark"),
+            hint=_("run 'hg heads' to see all heads"))
+    elif len(bmheads) <= 1:
+        raise error.Abort(_("no matching bookmark to merge - "
+            "please merge with an explicit rev or bookmark"),
+            hint=_("run 'hg heads' to see all heads"))
+    assert node is not None
+    return node
+
+def _destmergebranch(repo):
+    """find merge destination based on branch heads"""
+    node = None
+    branch = repo[None].branch()
+    bheads = repo.branchheads(branch)
+    nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]
+
+    if len(nbhs) > 2:
+        raise error.Abort(_("branch '%s' has %d heads - "
+                           "please merge with an explicit rev")
+                         % (branch, len(bheads)),
+                         hint=_("run 'hg heads .' to see heads"))
+
+    parent = repo.dirstate.p1()
+    if len(nbhs) <= 1:
+        if len(bheads) > 1:
+            raise error.Abort(_("heads are bookmarked - "
+                               "please merge with an explicit rev"),
+                             hint=_("run 'hg heads' to see all heads"))
+        if len(repo.heads()) > 1:
+            raise error.Abort(_("branch '%s' has one head - "
+                               "please merge with an explicit rev")
+                             % branch,
+                             hint=_("run 'hg heads' to see all heads"))
+        msg, hint = _('nothing to merge'), None
+        if parent != repo.lookup(branch):
+            hint = _("use 'hg update' instead")
+        raise error.Abort(msg, hint=hint)
+
+    if parent not in bheads:
+        raise error.Abort(_('working directory not at a head revision'),
+                         hint=_("use 'hg update' or merge with an "
+                                "explicit revision"))
+    if parent == nbhs[0]:
+        node = nbhs[-1]
+    else:
+        node = nbhs[0]
+    assert node is not None
+    return node
+
+def destmerge(repo):
+    if repo._activebookmark:
+        node = _destmergebook(repo)
+    else:
+        node = _destmergebranch(repo)
+    return repo[node].rev()
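destupdate() above walks an ordered list of step names plus a name-to-function mapping until one step returns a destination, which is what makes the logic easy for extensions to extend or reorder. A toy version of that table-driven dispatch (step names and functions here are invented):

    def _bybookmark(state):
        return None                     # this step finds nothing

    def _bybranch(state):
        return 'tip'                    # this step decides the destination

    steps = ['bookmark', 'branch']
    stepmap = {'bookmark': _bybookmark, 'branch': _bybranch}

    def pickdestination(state):
        for name in steps:
            dest = stepmap[name](state)
            if dest is not None:
                return dest
        return None

    print(pickdestination({}))          # -> 'tip'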
--- a/mercurial/dirstate.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/dirstate.py	Tue Oct 20 15:59:10 2015 -0500
@@ -7,7 +7,7 @@
 
 from node import nullid
 from i18n import _
-import scmutil, util, osutil, parsers, encoding, pathutil
+import scmutil, util, osutil, parsers, encoding, pathutil, error
 import os, stat, errno
 import match as matchmod
 
@@ -27,6 +27,31 @@
     def join(self, obj, fname):
         return obj._join(fname)
 
+def _getfsnow(vfs):
+    '''Get "now" timestamp on filesystem'''
+    tmpfd, tmpname = vfs.mkstemp()
+    try:
+        return util.statmtimesec(os.fstat(tmpfd))
+    finally:
+        os.close(tmpfd)
+        vfs.unlink(tmpname)
+
+def _trypending(root, vfs, filename):
+    '''Open a file to be read according to the HG_PENDING environment variable
+
+    This opens '.pending' of the specified 'filename' only when HG_PENDING
+    is equal to 'root'.
+
+    This returns '(fp, is_pending_opened)' tuple.
+    '''
+    if root == os.environ.get('HG_PENDING'):
+        try:
+            return (vfs('%s.pending' % filename), True)
+        except IOError as inst:
+            if inst.errno != errno.ENOENT:
+                raise
+    return (vfs(filename), False)
+
 class dirstate(object):
 
     def __init__(self, opener, ui, root, validate):
@@ -42,6 +67,10 @@
         # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
         # UNC path pointing to root share (issue4557)
         self._rootdir = pathutil.normasprefix(root)
+        # internal config: ui.forcecwd
+        forcecwd = ui.config('ui', 'forcecwd')
+        if forcecwd:
+            self._cwd = forcecwd
         self._dirty = False
         self._dirtypl = False
         self._lastnormaltime = 0
@@ -49,6 +78,10 @@
         self._filecache = {}
         self._parentwriters = 0
         self._filename = 'dirstate'
+        self._pendingfilename = '%s.pending' % self._filename
+
+        # for consistent view between _pl() and _read() invocations
+        self._pendingmode = None
 
     def beginparentchange(self):
         '''Marks the beginning of a set of changes that involve changing
@@ -123,14 +156,14 @@
     @propertycache
     def _pl(self):
         try:
-            fp = self._opener(self._filename)
+            fp = self._opendirstatefile()
             st = fp.read(40)
             fp.close()
             l = len(st)
             if l == 40:
                 return st[:20], st[20:40]
             elif l > 0 and l < 40:
-                raise util.Abort(_('working directory state appears damaged!'))
+                raise error.Abort(_('working directory state appears damaged!'))
         except IOError as err:
             if err.errno != errno.ENOENT:
                 raise
@@ -220,6 +253,12 @@
         return os.getcwd()
 
     def getcwd(self):
+        '''Return the path from which a canonical path is calculated.
+
+        This path should be used to resolve file patterns or to convert
+        canonical paths back to file paths for display. It shouldn't be
+        used to get real file paths. Use vfs functions instead.
+        '''
         cwd = self._cwd
         if cwd == self._root:
             return ''
@@ -322,11 +361,20 @@
             f.discard()
             raise
 
+    def _opendirstatefile(self):
+        fp, mode = _trypending(self._root, self._opener, self._filename)
+        if self._pendingmode is not None and self._pendingmode != mode:
+            fp.close()
+            raise error.Abort(_('working directory state may be '
+                                'changed in parallel'))
+        self._pendingmode = mode
+        return fp
+
     def _read(self):
         self._map = {}
         self._copymap = {}
         try:
-            fp = self._opener.open(self._filename)
+            fp = self._opendirstatefile()
             try:
                 st = fp.read()
             finally:
@@ -402,13 +450,13 @@
         if state == 'a' or oldstate == 'r':
             scmutil.checkfilename(f)
             if f in self._dirs:
-                raise util.Abort(_('directory %r already in dirstate') % f)
+                raise error.Abort(_('directory %r already in dirstate') % f)
             # shadows
             for d in util.finddirs(f):
                 if d in self._dirs:
                     break
                 if d in self._map and self[d] != 'r':
-                    raise util.Abort(
+                    raise error.Abort(
                         _('file %r in dirstate clashes with %r') % (d, f))
         if oldstate in "?r" and "_dirs" in self.__dict__:
             self._dirs.addpath(f)
@@ -418,7 +466,7 @@
     def normal(self, f):
         '''Mark a file normal and clean.'''
         s = os.lstat(self._join(f))
-        mtime = int(s.st_mtime)
+        mtime = util.statmtimesec(s)
         self._addpath(f, 'n', s.st_mode,
                       s.st_size & _rangemask, mtime & _rangemask)
         if f in self._copymap:
@@ -454,7 +502,7 @@
     def otherparent(self, f):
         '''Mark as coming from the other parent, always dirty.'''
         if self._pl[1] == nullid:
-            raise util.Abort(_("setting %r to other parent "
+            raise error.Abort(_("setting %r to other parent "
                                "only allowed in merges") % f)
         if f in self and self[f] == 'n':
             # merge-like
@@ -600,7 +648,7 @@
         self._pl = (parent, nullid)
         self._dirty = True
 
-    def write(self):
+    def write(self, tr=False):
         if not self._dirty:
             return
 
@@ -611,10 +659,47 @@
             import time # to avoid useless import
             time.sleep(delaywrite)
 
-        st = self._opener(self._filename, "w", atomictemp=True)
+        filename = self._filename
+        if tr is False: # not explicitly specified
+            if (self._ui.configbool('devel', 'all-warnings')
+                or self._ui.configbool('devel', 'check-dirstate-write')):
+                self._ui.develwarn('use dirstate.write with '
+                                   'repo.currenttransaction()')
+
+            if self._opener.lexists(self._pendingfilename):
+                # if pending file already exists, in-memory changes
+                # should be written into it, because it has priority
+                # to '.hg/dirstate' at reading under HG_PENDING mode
+                filename = self._pendingfilename
+        elif tr:
+            # 'dirstate.write()' is not only for writing in-memory
+            # changes out, but also for dropping ambiguous timestamps.
+            # Delayed writing would reintroduce the "ambiguous timestamp issue".
+            # See also the wiki page below for details:
+            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
+
+            # emulate dropping timestamp in 'parsers.pack_dirstate'
+            now = _getfsnow(self._opener)
+            dmap = self._map
+            for f, e in dmap.iteritems():
+                if e[0] == 'n' and e[3] == now:
+                    dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
+
+            # emulate that all 'dirstate.normal' results are written out
+            self._lastnormaltime = 0
+
+            # delay writing in-memory changes out
+            tr.addfilegenerator('dirstate', (self._filename,),
+                                self._writedirstate, location='plain')
+            return
+
+        st = self._opener(filename, "w", atomictemp=True)
+        self._writedirstate(st)
+
+    def _writedirstate(self, st):
         # use the modification time of the newly created temporary file as the
         # filesystem's notion of 'now'
-        now = util.fstat(st).st_mtime
+        now = util.statmtimesec(util.fstat(st)) & _rangemask
         st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
         st.close()
         self._lastnormaltime = 0
@@ -918,8 +1003,14 @@
                 # We may not have walked the full directory tree above,
                 # so stat and check everything we missed.
                 nf = iter(visit).next
-                for st in util.statfiles([join(i) for i in visit]):
-                    results[nf()] = st
+                pos = 0
+                while pos < len(visit):
+                    # visit in mid-sized batches so that we don't
+                    # block signals indefinitely
+                    xr = xrange(pos, min(len(visit), pos + 1000))
+                    for st in util.statfiles([join(visit[n]) for n in xr]):
+                        results[nf()] = st
+                    pos += 1000
         return results
 
     def status(self, match, subrepos, ignored, clean, unknown):
@@ -988,7 +1079,7 @@
             if not st and state in "nma":
                 dadd(fn)
             elif state == 'n':
-                mtime = int(st.st_mtime)
+                mtime = util.statmtimesec(st)
                 if (size >= 0 and
                     ((size != st.st_size and size != st.st_size & _rangemask)
                      or ((mode ^ st.st_mode) & 0o100 and checkexec))
@@ -1032,3 +1123,45 @@
             # that
             return list(files)
         return [f for f in dmap if match(f)]
+
+    def _actualfilename(self, tr):
+        if tr:
+            return self._pendingfilename
+        else:
+            return self._filename
+
+    def _savebackup(self, tr, suffix):
+        '''Save current dirstate into backup file with suffix'''
+        filename = self._actualfilename(tr)
+
+        # use '_writedirstate' instead of 'write' to make sure changes are
+        # written out, because the latter skips writing during a transaction.
+        # The output file is then used to back up the dirstate at this point.
+        self._writedirstate(self._opener(filename, "w", atomictemp=True))
+
+        if tr:
+            # ensure that subsequent tr.writepending returns True for
+            # changes written out above, even if dirstate is never
+            # changed after this
+            tr.addfilegenerator('dirstate', (self._filename,),
+                                self._writedirstate, location='plain')
+
+            # ensure that pending file written above is unlinked at
+            # failure, even if tr.writepending isn't invoked until the
+            # end of this transaction
+            tr.registertmp(filename, location='plain')
+
+        self._opener.write(filename + suffix, self._opener.tryread(filename))
+
+    def _restorebackup(self, tr, suffix):
+        '''Restore dirstate by backup file with suffix'''
+        # this "invalidate()" prevents "wlock.release()" from writing
+        # changes of dirstate out after restoring from backup file
+        self.invalidate()
+        filename = self._actualfilename(tr)
+        self._opener.rename(filename + suffix, filename)
+
+    def _clearbackup(self, tr, suffix):
+        '''Clear backup file with suffix'''
+        filename = self._actualfilename(tr)
+        self._opener.unlink(filename + suffix)
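_trypending() above prefers the '<name>.pending' file only when the HG_PENDING environment variable names this repository root, so hooks running inside a transaction see pending data while everything else keeps reading the committed file. A standalone approximation using plain open() instead of a Mercurial vfs (file and path names below are just for the demo):

    import errno
    import os

    def trypending(root, filename):
        # prefer '<filename>.pending' only when HG_PENDING matches this root
        if root == os.environ.get('HG_PENDING'):
            try:
                return open(filename + '.pending', 'rb'), True
            except IOError as inst:
                if inst.errno != errno.ENOENT:
                    raise
        return open(filename, 'rb'), False

    with open('dirstate-demo', 'wb') as f:
        f.write(b'fake dirstate contents')
    fp, frompending = trypending('/repo/root', 'dirstate-demo')
    print(frompending)                  # False unless HG_PENDING == '/repo/root'
    fp.close()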
--- a/mercurial/discovery.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/discovery.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,10 +5,24 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from node import nullid, short
-from i18n import _
-import util, setdiscovery, treediscovery, phases, obsolete, bookmarks
-import branchmap
+from __future__ import absolute_import
+
+from .i18n import _
+from .node import (
+    nullid,
+    short,
+)
+
+from . import (
+    bookmarks,
+    branchmap,
+    error,
+    obsolete,
+    phases,
+    setdiscovery,
+    treediscovery,
+    util,
+)
 
 def findcommonincoming(repo, remote, heads=None, force=False):
     """Return a tuple (common, anyincoming, heads) used to identify the common
@@ -249,7 +263,7 @@
     # 1. Check for new branches on the remote.
     if newbranches and not newbranch:  # new branch requires --new-branch
         branchnames = ', '.join(sorted(newbranches))
-        raise util.Abort(_("push creates new remote branches: %s!")
+        raise error.Abort(_("push creates new remote branches: %s!")
                            % branchnames,
                          hint=_("use 'hg push --new-branch' to create"
                                 " new remote branches"))
@@ -271,7 +285,7 @@
     # 3. Check for new heads.
     # If there are more heads after the push than before, a suitable
     # error message, depending on unsynced status, is displayed.
-    error = None
+    errormsg = None
     # If there is no obsstore, allfuturecommon won't be used, so no
     # need to compute it.
     if repo.obsstore:
@@ -291,11 +305,13 @@
         candidate_newhs.update(unsyncedheads)
         dhs = None # delta heads, the new heads on branch
         discardedheads = set()
-        if repo.obsstore:
+        if not repo.obsstore:
+            newhs = candidate_newhs
+        else:
             # remove future heads which are actually obsoleted by another
             # pushed element:
             #
-            # XXX as above, There are several cases this case does not handle
+            # XXX as above, There are several cases this code does not handle
             # XXX properly
             #
             # (1) if <nh> is public, it won't be affected by obsolete marker
@@ -306,6 +322,9 @@
             #
             # These two cases will be easy to handle for known changeset but
             # much more tricky for unsynced changes.
+            #
+            # In addition, this code is confused by prune as it only looks for
+            # successors of the heads (none if pruned) leading to issue4354
             newhs = set()
             for nh in candidate_newhs:
                 if nh in repo and repo[nh].phase() <= phases.public:
@@ -317,8 +336,6 @@
                             break
                     else:
                         newhs.add(nh)
-        else:
-            newhs = candidate_newhs
         unsynced = sorted(h for h in unsyncedheads if h not in discardedheads)
         if unsynced:
             if None in unsynced:
@@ -341,9 +358,9 @@
         if remoteheads is None:
             if len(newhs) > 1:
                 dhs = list(newhs)
-                if error is None:
-                    error = (_("push creates new branch '%s' "
-                               "with multiple heads") % (branch))
+                if errormsg is None:
+                    errormsg = (_("push creates new branch '%s' "
+                                  "with multiple heads") % (branch))
                     hint = _("merge or"
                              " see \"hg help push\" for details about"
                              " pushing new heads")
@@ -351,17 +368,17 @@
             # remove bookmarked or existing remote heads from the new heads list
             dhs = sorted(newhs - bookmarkedheads - oldhs)
         if dhs:
-            if error is None:
+            if errormsg is None:
                 if branch not in ('default', None):
-                    error = _("push creates new remote head %s "
-                              "on branch '%s'!") % (short(dhs[0]), branch)
+                    errormsg = _("push creates new remote head %s "
+                                 "on branch '%s'!") % (short(dhs[0]), branch)
                 elif repo[dhs[0]].bookmarks():
-                    error = _("push creates new remote head %s "
-                              "with bookmark '%s'!") % (
-                              short(dhs[0]), repo[dhs[0]].bookmarks()[0])
+                    errormsg = _("push creates new remote head %s "
+                                 "with bookmark '%s'!") % (
+                                 short(dhs[0]), repo[dhs[0]].bookmarks()[0])
                 else:
-                    error = _("push creates new remote head %s!"
-                              ) % short(dhs[0])
+                    errormsg = _("push creates new remote head %s!"
+                                 ) % short(dhs[0])
                 if unsyncedheads:
                     hint = _("pull and merge or"
                              " see \"hg help push\" for details about"
@@ -376,5 +393,5 @@
                 repo.ui.note(_("new remote heads on branch '%s':\n") % branch)
             for h in dhs:
                 repo.ui.note((" %s\n") % short(h))
-    if error:
-        raise util.Abort(error, hint=hint)
+    if errormsg:
+        raise error.Abort(errormsg, hint=hint)
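When deciding whether a push would create new remote heads, the code above ends up reporting only heads that are genuinely new: the candidate set minus bookmarked heads and minus heads the remote already has (dhs = sorted(newhs - bookmarkedheads - oldhs)). A toy illustration with plain sets and made-up short hashes:

    newhs = {'a1b2', 'c3d4', 'e5f6'}          # branch heads after the push
    oldhs = {'a1b2'}                          # heads the remote already has
    bookmarkedheads = {'c3d4'}                # heads carried by bookmarks
    dhs = sorted(newhs - bookmarkedheads - oldhs)
    print(dhs)   # ['e5f6'] -> the head that triggers "push creates new remote head"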
--- a/mercurial/dispatch.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/dispatch.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,13 +5,37 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from i18n import _
-import os, sys, atexit, signal, pdb, socket, errno, shlex, time, traceback, re
+from __future__ import absolute_import
+
+import atexit
 import difflib
-import util, commands, hg, fancyopts, extensions, hook, error
-import cmdutil, encoding
-import ui as uimod
-import demandimport
+import errno
+import os
+import pdb
+import re
+import shlex
+import signal
+import socket
+import sys
+import time
+import traceback
+
+
+from .i18n import _
+
+from . import (
+    cmdutil,
+    commands,
+    demandimport,
+    encoding,
+    error,
+    extensions,
+    fancyopts,
+    hg,
+    hook,
+    ui as uimod,
+    util,
+)
 
 class request(object):
     def __init__(self, args, ui=None, repo=None, fin=None, fout=None,
@@ -76,7 +100,7 @@
             req.ui.fout = req.fout
         if req.ferr:
             req.ui.ferr = req.ferr
-    except util.Abort as inst:
+    except error.Abort as inst:
         ferr.write(_("abort: %s\n") % inst)
         if inst.hint:
             ferr.write(_("(%s)\n") % inst.hint)
@@ -157,8 +181,8 @@
                     debugtrace[debugger] == debugtrace['pdb']):
                     ui.warn(_("%s debugger specified "
                               "but its module was not found\n") % debugger)
-
-                debugtrace[debugger]()
+                with demandimport.deactivated():
+                    debugtrace[debugger]()
             try:
                 return _dispatch(req)
             finally:
@@ -229,7 +253,7 @@
             # check if the command is in a disabled extension
             # (but don't check for extensions themselves)
             commands.help_(ui, inst.args[0], unknowncmd=True)
-        except error.UnknownCommand:
+        except (error.UnknownCommand, error.Abort):
             suggested = False
             if len(inst.args) == 2:
                 sim = _getsimilar(inst.args[1], inst.args[0])
@@ -242,7 +266,7 @@
     except error.InterventionRequired as inst:
         ui.warn("%s\n" % inst)
         return 1
-    except util.Abort as inst:
+    except error.Abort as inst:
         ui.warn(_("abort: %s\n") % inst)
         if inst.hint:
             ui.warn(_("(%s)\n") % inst.hint)
@@ -268,8 +292,7 @@
             ui.warn(_("abort: error: %s\n") % reason)
         elif (util.safehasattr(inst, "args")
               and inst.args and inst.args[0] == errno.EPIPE):
-            if ui.debugflag:
-                ui.warn(_("broken pipe\n"))
+            pass
         elif getattr(inst, "strerror", None):
             if getattr(inst, "filename", None):
                 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
@@ -286,10 +309,7 @@
         try:
             ui.warn(_("interrupted!\n"))
         except IOError as inst:
-            if inst.errno == errno.EPIPE:
-                if ui.debugflag:
-                    ui.warn(_("\nbroken pipe\n"))
-            else:
+            if inst.errno != errno.EPIPE:
                 raise
     except MemoryError:
         ui.warn(_("abort: out of memory\n"))
@@ -311,26 +331,27 @@
         compare = myver.split('+')[0]
         ct = tuplever(compare)
         worst = None, ct, ''
-        for name, mod in extensions.extensions():
-            testedwith = getattr(mod, 'testedwith', '')
-            report = getattr(mod, 'buglink', _('the extension author.'))
-            if not testedwith.strip():
-                # We found an untested extension. It's likely the culprit.
-                worst = name, 'unknown', report
-                break
+        if ui.config('ui', 'supportcontact', None) is None:
+            for name, mod in extensions.extensions():
+                testedwith = getattr(mod, 'testedwith', '')
+                report = getattr(mod, 'buglink', _('the extension author.'))
+                if not testedwith.strip():
+                    # We found an untested extension. It's likely the culprit.
+                    worst = name, 'unknown', report
+                    break
 
-            # Never blame on extensions bundled with Mercurial.
-            if testedwith == 'internal':
-                continue
+                # Never blame on extensions bundled with Mercurial.
+                if testedwith == 'internal':
+                    continue
 
-            tested = [tuplever(t) for t in testedwith.split()]
-            if ct in tested:
-                continue
+                tested = [tuplever(t) for t in testedwith.split()]
+                if ct in tested:
+                    continue
 
-            lower = [t for t in tested if t < ct]
-            nearest = max(lower or tested)
-            if worst[0] is None or nearest < worst[1]:
-                worst = name, nearest, report
+                lower = [t for t in tested if t < ct]
+                nearest = max(lower or tested)
+                if worst[0] is None or nearest < worst[1]:
+                    worst = name, nearest, report
         if worst[0] is not None:
             name, testedwith, report = worst
             if not isinstance(testedwith, str):
@@ -342,9 +363,11 @@
                          '** If that fixes the bug please report it to %s\n')
                        % (name, testedwith, name, report))
         else:
+            bugtracker = ui.config('ui', 'supportcontact', None)
+            if bugtracker is None:
+                bugtracker = _("https://mercurial-scm.org/wiki/BugTracker")
             warning = (_("** unknown exception encountered, "
-                         "please report by visiting\n") +
-                       _("** http://mercurial.selenic.com/wiki/BugTracker\n"))
+                         "please report by visiting\n** ") + bugtracker + '\n')
         warning += ((_("** Python %s\n") % sys.version.replace('\n', '')) +
                     (_("** Mercurial Distributed SCM (version %s)\n") % myver) +
                     (_("** Extensions loaded: %s\n") %
@@ -375,7 +398,7 @@
             nums.append(num)
             if num < len(givenargs):
                 return givenargs[num]
-            raise util.Abort(_('too few arguments for command alias'))
+            raise error.Abort(_('too few arguments for command alias'))
         cmd = re.sub(r'\$(\d+|\$)', replacer, cmd)
         givenargs = [x for i, x in enumerate(givenargs)
                      if i not in nums]
@@ -502,7 +525,7 @@
                     hint = _("'%s' is provided by '%s' extension") % (cmd, ext)
                 except error.UnknownCommand:
                     pass
-            raise util.Abort(self.badalias, hint=hint)
+            raise error.Abort(self.badalias, hint=hint)
         if self.shadows:
             ui.debug("alias '%s' shadows command '%s'\n" %
                      (self.name, self.cmdname))
@@ -591,7 +614,7 @@
             ui.setconfig(section, name, value, '--config')
             configs.append((section, name, value))
         except (IndexError, ValueError):
-            raise util.Abort(_('malformed --config option: %r '
+            raise error.Abort(_('malformed --config option: %r '
                                '(use --config section.name=value)') % cfg)
 
     return configs
@@ -667,7 +690,7 @@
     try:
         wd = os.getcwd()
     except OSError as e:
-        raise util.Abort(_("error getting current working directory: %s") %
+        raise error.Abort(_("error getting current working directory: %s") %
                          e.strerror)
     path = cmdutil.findrepo(wd) or ""
     if not path:
@@ -790,11 +813,11 @@
     cmd, func, args, options, cmdoptions = _parse(lui, args)
 
     if options["config"]:
-        raise util.Abort(_("option --config may not be abbreviated!"))
+        raise error.Abort(_("option --config may not be abbreviated!"))
     if options["cwd"]:
-        raise util.Abort(_("option --cwd may not be abbreviated!"))
+        raise error.Abort(_("option --cwd may not be abbreviated!"))
     if options["repository"]:
-        raise util.Abort(_(
+        raise error.Abort(_(
             "option -R has to be separated from other options (e.g. not -qR) "
             "and --repository may only be abbreviated as --repo!"))
 
@@ -861,11 +884,13 @@
             try:
                 repo = hg.repository(ui, path=path)
                 if not repo.local():
-                    raise util.Abort(_("repository '%s' is not local") % path)
+                    raise error.Abort(_("repository '%s' is not local") % path)
                 repo.ui.setconfig("bundle", "mainreporoot", repo.root, 'repo')
             except error.RequirementError:
                 raise
             except error.RepoError:
+                if rpath and rpath[-1]: # invalid -R path
+                    raise
                 if cmd not in commands.optionalrepo.split():
                     if (cmd in commands.inferrepo.split() and
                         args and not path): # try to infer -R from command args
@@ -909,9 +934,9 @@
         format = 'text'
 
     try:
-        from mercurial import lsprof
+        from . import lsprof
     except ImportError:
-        raise util.Abort(_(
+        raise error.Abort(_(
             'lsprof not available - install from '
             'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
     p = lsprof.Profiler()
@@ -922,7 +947,7 @@
         p.disable()
 
         if format == 'kcachegrind':
-            import lsprofcalltree
+            from . import lsprofcalltree
             calltree = lsprofcalltree.KCacheGrind(p)
             calltree.output(fp)
         else:
@@ -935,7 +960,7 @@
     try:
         from flamegraph import flamegraph
     except ImportError:
-        raise util.Abort(_(
+        raise error.Abort(_(
             'flamegraph not available - install from '
             'https://github.com/evanhempel/python-flamegraph'))
     # developer config: profiling.freq
@@ -960,7 +985,7 @@
     try:
         import statprof
     except ImportError:
-        raise util.Abort(_(
+        raise error.Abort(_(
             'statprof not available - install using "easy_install statprof"'))
 
     freq = ui.configint('profiling', 'freq', default=1000)
@@ -977,13 +1002,17 @@
         statprof.display(fp)
 
 def _runcommand(ui, options, cmd, cmdfunc):
+    """Enables the profiler if applicable.
+
+    ``profiling.enabled`` - boolean config that enables or disables profiling
+    """
     def checkargs():
         try:
             return cmdfunc()
         except error.SignatureError:
             raise error.CommandError(cmd, _("invalid arguments"))
 
-    if options['profile']:
+    if options['profile'] or ui.configbool('profiling', 'enabled'):
         profiler = os.getenv('HGPROF')
         if profiler is None:
             profiler = ui.config('profiling', 'type', default='ls')
@@ -993,7 +1022,10 @@
 
         output = ui.config('profiling', 'output')
 
-        if output:
+        if output == 'blackbox':
+            import StringIO
+            fp = StringIO.StringIO()
+        elif output:
             path = ui.expandpath(output)
             fp = open(path, 'wb')
         else:
@@ -1008,6 +1040,12 @@
                 return statprofile(ui, checkargs, fp)
         finally:
             if output:
+                if output == 'blackbox':
+                    val = "Profile:\n%s" % fp.getvalue()
+                    # ui.log treats the input as a format string,
+                    # so we need to escape any % signs.
+                    val = val.replace('%', '%%')
+                    ui.log('profile', val)
                 fp.close()
     else:
         return checkargs()
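
Taken together, the profiling hunks above let profiling be switched on by configuration and add a 'blackbox' output sink. A minimal hgrc sketch exercising these options (values are illustrative; the HGPROF environment variable, if set, takes precedence over ``profiling.type``):

    [profiling]
    enabled = true      # same effect as passing --profile
    type = ls           # default profiler
    output = blackbox   # route the report through ui.log('profile', ...)
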
--- a/mercurial/error.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/error.py	Tue Oct 20 15:59:10 2015 -0500
@@ -11,6 +11,8 @@
 imports.
 """
 
+from __future__ import absolute_import
+
 # Do not import anything here, please
 
 class HintException(Exception):
@@ -32,7 +34,7 @@
         # Python 2.6+ complain about the 'message' property being deprecated
         self.lookupmessage = message
         if isinstance(name, str) and len(name) == 20:
-            from node import short
+            from .node import short
             name = short(name)
         RevlogError.__init__(self, '%s@%s: %s' % (index, name, message))
 
@@ -53,17 +55,23 @@
 
 class Abort(HintException):
     """Raised if a command needs to print an error and exit."""
-    pass
+
+class HookLoadError(Abort):
+    """raised when loading a hook fails, aborting an operation
+
+    Exists to allow more specialized catching."""
 
 class HookAbort(Abort):
     """raised when a validation hook fails, aborting an operation
 
     Exists to allow more specialized catching."""
-    pass
 
 class ConfigError(Abort):
     """Exception raised when parsing config files"""
 
+class UpdateAbort(Abort):
+    """Raised when an update is aborted for destination issue"""
+
 class OutOfBandError(Exception):
     """Exception raised when a remote repo reports failure"""
 
@@ -78,7 +86,7 @@
     """Exception raised when a {rev,file}set references an unknown identifier"""
 
     def __init__(self, function, symbols):
-        from i18n import _
+        from .i18n import _
         ParseError.__init__(self, _("unknown identifier: %s") % function)
         self.function = function
         self.symbols = symbols
@@ -97,7 +105,6 @@
 
 class RequirementError(RepoError):
     """Exception raised if .hg/requires has an unknown entry."""
-    pass
 
 class LockError(IOError):
     def __init__(self, errno, strerror, filename, desc):
@@ -112,6 +119,10 @@
 class LockUnavailable(LockError):
     pass
 
+# LockError is for errors while acquiring the lock -- this is unrelated
+class LockInheritanceContractViolation(RuntimeError):
+    pass
+
 class ResponseError(Exception):
     """Raised to print an error with part of output and exit."""
 
@@ -135,21 +146,31 @@
 class BundleValueError(ValueError):
     """error raised when bundle2 cannot be processed"""
 
-class UnsupportedPartError(BundleValueError):
-    def __init__(self, parttype=None, params=()):
+class BundleUnknownFeatureError(BundleValueError):
+    def __init__(self, parttype=None, params=(), values=()):
         self.parttype = parttype
         self.params = params
+        self.values = values
         if self.parttype is None:
             msg = 'Stream Parameter'
         else:
             msg = parttype
-        if self.params:
-            msg = '%s - %s' % (msg, ', '.join(self.params))
+        entries = self.params
+        if self.params and self.values:
+            assert len(self.params) == len(self.values)
+            entries = []
+            for idx, par in enumerate(self.params):
+                val = self.values[idx]
+                if val is None:
+                    entries.append(par)
+                else:
+                    entries.append("%s=%r" % (par, val))
+        if entries:
+            msg = '%s - %s' % (msg, ', '.join(entries))
         ValueError.__init__(self, msg)
 
 class ReadOnlyPartError(RuntimeError):
     """error raised when code tries to alter a part being generated"""
-    pass
 
 class PushkeyFailed(Abort):
     """error raised when a pushkey part failed to update a value"""
@@ -173,7 +194,7 @@
     """
 
     def __init__(self, filename, node, tombstone):
-        from node import short
+        from .node import short
         RevlogError.__init__(self, '%s:%s' % (filename, short(node)))
         self.tombstone = tombstone
 
@@ -184,3 +205,12 @@
     operation which replaces the entire base with new content. This ensures
     the delta may be applied by clones which have not censored the base.
     """
+
+class InvalidBundleSpecification(Exception):
+    """error raised when a bundle specification is invalid.
+
+    This is used for syntax errors as opposed to support errors.
+    """
+
+class UnsupportedBundleSpecification(Exception):
+    """error raised when a bundle specification is not supported."""
--- a/mercurial/exchange.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/exchange.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,14 +5,140 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import time
 from i18n import _
 from node import hex, nullid
-import errno, urllib
-import util, scmutil, changegroup, base85, error, store
+import errno, urllib, urllib2
+import util, scmutil, changegroup, base85, error
 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
 import lock as lockmod
+import streamclone
+import sslutil
 import tags
+import url as urlmod
+
+# Maps bundle compression human names to internal representation.
+_bundlespeccompressions = {'none': None,
+                           'bzip2': 'BZ',
+                           'gzip': 'GZ',
+                          }
+
+# Maps bundle version human names to changegroup versions.
+_bundlespeccgversions = {'v1': '01',
+                         'v2': '02',
+                         'packed1': 's1',
+                         'bundle2': '02', #legacy
+                        }
+
+def parsebundlespec(repo, spec, strict=True, externalnames=False):
+    """Parse a bundle string specification into parts.
+
+    Bundle specifications denote a well-defined bundle/exchange format.
+    The content of a given specification should not change over time in
+    order to ensure that bundles produced by a newer version of Mercurial are
+    readable from an older version.
+
+    The string currently has the form:
+
+       <compression>-<type>[;<parameter0>[;<parameter1>]]
+
+    Where <compression> is one of the supported compression formats
+    and <type> is (currently) a version string. A ";" can follow the type and
+    all text afterwards is interpreted as URI encoded, ";" delimited key=value
+    pairs.
+
+    If ``strict`` is True (the default) <compression> is required. Otherwise,
+    it is optional.
+
+    If ``externalnames`` is False (the default), the human-centric names will
+    be converted to their internal representation.
+
+    Returns a 3-tuple of (compression, version, parameters). Compression will
+    be ``None`` if not in strict mode and a compression isn't defined.
+
+    An ``InvalidBundleSpecification`` is raised when the specification is
+    not syntactically well formed.
+
+    An ``UnsupportedBundleSpecification`` is raised when the compression or
+    bundle type/version is not recognized.
+
+    Note: this function will likely eventually return a more complex data
+    structure, including bundle2 part information.
+    """
+    def parseparams(s):
+        if ';' not in s:
+            return s, {}
+
+        params = {}
+        version, paramstr = s.split(';', 1)
+
+        for p in paramstr.split(';'):
+            if '=' not in p:
+                raise error.InvalidBundleSpecification(
+                    _('invalid bundle specification: '
+                      'missing "=" in parameter: %s') % p)
+
+            key, value = p.split('=', 1)
+            key = urllib.unquote(key)
+            value = urllib.unquote(value)
+            params[key] = value
+
+        return version, params
+
+
+    if strict and '-' not in spec:
+        raise error.InvalidBundleSpecification(
+                _('invalid bundle specification; '
+                  'must be prefixed with compression: %s') % spec)
+
+    if '-' in spec:
+        compression, version = spec.split('-', 1)
+
+        if compression not in _bundlespeccompressions:
+            raise error.UnsupportedBundleSpecification(
+                    _('%s compression is not supported') % compression)
+
+        version, params = parseparams(version)
+
+        if version not in _bundlespeccgversions:
+            raise error.UnsupportedBundleSpecification(
+                    _('%s is not a recognized bundle version') % version)
+    else:
+        # Value could be just the compression or just the version, in which
+        # case some defaults are assumed (but only when not in strict mode).
+        assert not strict
+
+        spec, params = parseparams(spec)
+
+        if spec in _bundlespeccompressions:
+            compression = spec
+            version = 'v1'
+            if 'generaldelta' in repo.requirements:
+                version = 'v2'
+        elif spec in _bundlespeccgversions:
+            if spec == 'packed1':
+                compression = 'none'
+            else:
+                compression = 'bzip2'
+            version = spec
+        else:
+            raise error.UnsupportedBundleSpecification(
+                    _('%s is not a recognized bundle specification') % spec)
+
+    # The specification for packed1 can optionally declare the data formats
+    # required to apply it. If we see this metadata, compare against what the
+    # repo supports and error if the bundle isn't compatible.
+    if version == 'packed1' and 'requirements' in params:
+        requirements = set(params['requirements'].split(','))
+        missingreqs = requirements - repo.supportedformats
+        if missingreqs:
+            raise error.UnsupportedBundleSpecification(
+                    _('missing support for repository features: %s') %
+                      ', '.join(sorted(missingreqs)))
+
+    if not externalnames:
+        compression = _bundlespeccompressions[compression]
+        version = _bundlespeccgversions[version]
+    return compression, version, params
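+
+For illustration, here is how the grammar above maps onto return values, per
+the ``_bundlespeccompressions`` and ``_bundlespeccgversions`` tables (a
+sketch, assuming a local repository that supports and uses generaldelta):
+
+    parsebundlespec(repo, 'gzip-v2')
+    # -> ('GZ', '02', {})
+    parsebundlespec(repo, 'none-packed1;requirements=generaldelta')
+    # -> (None, 's1', {'requirements': 'generaldelta'})
+    parsebundlespec(repo, 'bzip2', strict=False)
+    # -> ('BZ', '02', {})  -- version defaults to v2 because of generaldelta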
 
 def readbundle(ui, fh, fname, vfs=None):
     header = changegroup.readexactly(fh, 4)
@@ -30,15 +156,17 @@
     magic, version = header[0:2], header[2:4]
 
     if magic != 'HG':
-        raise util.Abort(_('%s: not a Mercurial bundle') % fname)
+        raise error.Abort(_('%s: not a Mercurial bundle') % fname)
     if version == '10':
         if alg is None:
             alg = changegroup.readexactly(fh, 2)
         return changegroup.cg1unpacker(fh, alg)
     elif version.startswith('2'):
         return bundle2.getunbundler(ui, fh, magicstring=magic + version)
+    elif version == 'S1':
+        return streamclone.streamcloneapplier(fh)
     else:
-        raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
+        raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
 
 def buildobsmarkerspart(bundler, markers):
     """add an obsmarker part to the bundler with <markers>
@@ -50,7 +178,7 @@
         remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
         version = obsolete.commonversion(remoteversions)
         if version is None:
-            raise ValueError('bundler do not support common obsmarker format')
+            raise ValueError('bundler does not support common obsmarker format')
         stream = obsolete.encodemarkers(markers, True, version=version)
         return bundler.newpart('obsmarkers', data=stream)
     return None
@@ -147,7 +275,7 @@
         #
         # We can pick:
         # * missingheads part of common (::commonheads)
-        common = set(self.outgoing.common)
+        common = self.outgoing.common
         nm = self.repo.changelog.nodemap
         cheads = [node for node in self.revs if nm[node] in common]
         # and
@@ -176,7 +304,8 @@
               }
 
 
-def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=()):
+def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
+         opargs=None):
     '''Push outgoing changesets (limited by revs) from a local
     repository to remote. Return an integer:
       - None means nothing to push
@@ -185,7 +314,10 @@
         we have outgoing changesets but refused to push
       - other values as described by addchangegroup()
     '''
-    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks)
+    if opargs is None:
+        opargs = {}
+    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
+                           **opargs)
     if pushop.remote.local():
         missing = (set(pushop.repo.requirements)
                    - pushop.remote.local().supported)
@@ -193,7 +325,7 @@
             msg = _("required features are not"
                     " supported in the destination:"
                     " %s") % (', '.join(sorted(missing)))
-            raise util.Abort(msg)
+            raise error.Abort(msg)
 
     # there are two ways to push to remote repo:
     #
@@ -204,7 +336,7 @@
     # servers, http servers).
 
     if not pushop.remote.canpush():
-        raise util.Abort(_("destination does not support push"))
+        raise error.Abort(_("destination does not support push"))
     # get local lock as we might write phase data
     localwlock = locallock = None
     try:
@@ -226,7 +358,7 @@
         pushop.ui.debug(msg)
     try:
         if pushop.locallocked:
-            pushop.trmanager = transactionmanager(repo,
+            pushop.trmanager = transactionmanager(pushop.repo,
                                                   'push-response',
                                                   pushop.remote.url())
         pushop.repo.checkpush(pushop)
@@ -435,9 +567,9 @@
             for node in outgoing.missingheads:
                 ctx = unfi[node]
                 if ctx.obsolete():
-                    raise util.Abort(mso % ctx)
+                    raise error.Abort(mso % ctx)
                 elif ctx.troubled():
-                    raise util.Abort(mst[ctx.troubles()[0]] % ctx)
+                    raise error.Abort(mst[ctx.troubles()[0]] % ctx)
 
         # internal config: bookmarks.pushing
         newbm = pushop.ui.configlist('bookmarks', 'pushing')
@@ -475,6 +607,14 @@
         return func
     return dec
 
+def _pushb2ctxcheckheads(pushop, bundler):
+    """Generate race condition checking parts
+
+    Exists as an independent function to aid extensions
+    """
+    if not pushop.force:
+        bundler.newpart('check:heads', data=iter(pushop.remoteheads))
+
 @b2partsgenerator('changeset')
 def _pushb2ctx(pushop, bundler):
     """handle changegroup push through bundle2
@@ -490,8 +630,9 @@
     pushop.repo.prepushoutgoinghooks(pushop.repo,
                                      pushop.remote,
                                      pushop.outgoing)
-    if not pushop.force:
-        bundler.newpart('check:heads', data=iter(pushop.remoteheads))
+
+    _pushb2ctxcheckheads(pushop, bundler)
+
     b2caps = bundle2.bundle2caps(pushop.remote)
     version = None
     cgversions = b2caps.get('changegroup')
@@ -571,7 +712,7 @@
 
 @b2partsgenerator('bookmarks')
 def _pushb2bookmarks(pushop, bundler):
-    """handle phase push through bundle2"""
+    """handle bookmark push through bundle2"""
     if 'bookmarks' in pushop.stepsdone:
         return
     b2caps = bundle2.bundle2caps(pushop.remote)
@@ -649,14 +790,14 @@
         try:
             reply = pushop.remote.unbundle(stream, ['force'], 'push')
         except error.BundleValueError as exc:
-            raise util.Abort('missing support for %s' % exc)
+            raise error.Abort('missing support for %s' % exc)
         try:
             trgetter = None
             if pushback:
                 trgetter = pushop.trmanager.transaction
             op = bundle2.processbundle(pushop.repo, reply, trgetter)
         except error.BundleValueError as exc:
-            raise util.Abort('missing support for %s' % exc)
+            raise error.Abort('missing support for %s' % exc)
     except error.PushkeyFailed as exc:
         partid = int(exc.partid)
         if partid not in pushop.pkfailcb:
@@ -838,7 +979,7 @@
     """
 
     def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
-                 remotebookmarks=None):
+                 remotebookmarks=None, streamclonerequested=None):
         # repo we pull into
         self.repo = repo
         # repo we pull from
@@ -849,6 +990,8 @@
         self.explicitbookmarks = bookmarks
         # do we force pull?
         self.force = force
+        # whether a streaming clone was requested
+        self.streamclonerequested = streamclonerequested
         # transaction manager
         self.trmanager = None
         # set of common changeset between local and remote before pull
@@ -863,6 +1006,8 @@
         self.cgresult = None
         # list of step already done
         self.stepsdone = set()
+        # Whether we attempted a clone from pre-generated bundles.
+        self.clonebundleattempted = False
 
     @util.propertycache
     def pulledsubset(self):
@@ -882,6 +1027,14 @@
             # sync on this subset
             return self.heads
 
+    @util.propertycache
+    def canusebundle2(self):
+        return _canusebundle2(self)
+
+    @util.propertycache
+    def remotebundle2caps(self):
+        return bundle2.bundle2caps(self.remote)
+
     def gettransaction(self):
         # deprecated; talk to trmanager directly
         return self.trmanager.transaction()
@@ -916,24 +1069,49 @@
         if self._tr is not None:
             self._tr.release()
 
-def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None):
+def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
+         streamclonerequested=None):
+    """Fetch repository data from a remote.
+
+    This is the main function used to retrieve data from a remote repository.
+
+    ``repo`` is the local repository to clone into.
+    ``remote`` is a peer instance.
+    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
+    default) means to pull everything from the remote.
+    ``bookmarks`` is an iterable of bookmark names to pull. By default, all
+    remote bookmarks are pulled.
+    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
+    initialization.
+    ``streamclonerequested`` is a boolean indicating whether a "streaming
+    clone" is requested. A "streaming clone" is essentially a raw file copy
+    of revlogs from the server. This only works when the local repository is
+    empty. The default value of ``None`` means to respect the server
+    configuration for preferring stream clones.
+
+    Returns the ``pulloperation`` created for this pull.
+    """
     if opargs is None:
         opargs = {}
     pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
-                           **opargs)
+                           streamclonerequested=streamclonerequested, **opargs)
     if pullop.remote.local():
         missing = set(pullop.remote.requirements) - pullop.repo.supported
         if missing:
             msg = _("required features are not"
                     " supported in the destination:"
                     " %s") % (', '.join(sorted(missing)))
-            raise util.Abort(msg)
+            raise error.Abort(msg)
 
     lock = pullop.repo.lock()
     try:
         pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
+        streamclone.maybeperformlegacystreamclone(pullop)
+        # This should ideally be in _pullbundle2(). However, it needs to run
+        # before discovery to avoid extra work.
+        _maybeapplyclonebundle(pullop)
         _pulldiscovery(pullop)
-        if _canusebundle2(pullop):
+        if pullop.canusebundle2:
             _pullbundle2(pullop)
         _pullchangeset(pullop)
         _pullphase(pullop)
@@ -984,8 +1162,7 @@
     discovery to reduce the chance and impact of race conditions."""
     if pullop.remotebookmarks is not None:
         return
-    if (_canusebundle2(pullop)
-            and 'listkeys' in bundle2.bundle2caps(pullop.remote)):
+    if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
         # all known bundle2 servers now support listkeys, but lets be nice with
         # new implementation.
         return
@@ -1034,28 +1211,42 @@
     """pull data using bundle2
 
     For now, the only supported data are changegroup."""
-    remotecaps = bundle2.bundle2caps(pullop.remote)
     kwargs = {'bundlecaps': caps20to10(pullop.repo)}
+
+    streaming, streamreqs = streamclone.canperformstreamclone(pullop)
+
     # pulling changegroup
     pullop.stepsdone.add('changegroup')
 
     kwargs['common'] = pullop.common
     kwargs['heads'] = pullop.heads or pullop.rheads
     kwargs['cg'] = pullop.fetch
-    if 'listkeys' in remotecaps:
+    if 'listkeys' in pullop.remotebundle2caps:
         kwargs['listkeys'] = ['phase']
         if pullop.remotebookmarks is None:
             # make sure to always include bookmark data when migrating
             # `hg incoming --bundle` to using this function.
             kwargs['listkeys'].append('bookmarks')
-    if not pullop.fetch:
+
+    # If this is a full pull / clone and the server supports the clone bundles
+    # feature, tell the server whether we attempted a clone bundle. The
+    # presence of this flag indicates the client supports clone bundles. This
+    # will enable the server to treat clients that support clone bundles
+    # differently from those that don't.
+    if (pullop.remote.capable('clonebundles')
+        and pullop.heads is None and list(pullop.common) == [nullid]):
+        kwargs['cbattempted'] = pullop.clonebundleattempted
+
+    if streaming:
+        pullop.repo.ui.status(_('streaming all changes\n'))
+    elif not pullop.fetch:
         pullop.repo.ui.status(_("no changes found\n"))
         pullop.cgresult = 0
     else:
         if pullop.heads is None and list(pullop.common) == [nullid]:
             pullop.repo.ui.status(_("requesting all changes\n"))
     if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
-        remoteversions = bundle2.obsmarkersversion(remotecaps)
+        remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
         if obsolete.commonversion(remoteversions) is not None:
             kwargs['obsmarkers'] = True
             pullop.stepsdone.add('obsmarkers')
@@ -1064,7 +1255,7 @@
     try:
         op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
     except error.BundleValueError as exc:
-        raise util.Abort('missing support for %s' % exc)
+        raise error.Abort('missing support for %s' % exc)
 
     if pullop.fetch:
         results = [cg['return'] for cg in op.records['changegroup']]
@@ -1114,13 +1305,12 @@
     elif pullop.heads is None:
         cg = pullop.remote.changegroup(pullop.fetch, 'pull')
     elif not pullop.remote.capable('changegroupsubset'):
-        raise util.Abort(_("partial pull cannot be done because "
+        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
     else:
         cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
-    pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
-                                                 pullop.remote.url())
+    pullop.cgresult = cg.apply(pullop.repo, 'pull', pullop.remote.url())
 
 def _pullphase(pullop):
     # Get remote phases data from remote
@@ -1397,7 +1587,8 @@
     If the push was raced, a PushRaced exception is raised."""
     r = 0
     # need a transaction when processing a bundle2 stream
-    wlock = lock = tr = None
+    # [wlock, lock, tr] - needs to be a list so nested functions can modify it
+    lockandtr = [None, None, None]
     recordout = None
     # quick fix for output mismatch with bundle2 in 3.4
     captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
@@ -1410,23 +1601,33 @@
         if util.safehasattr(cg, 'params'):
             r = None
             try:
-                wlock = repo.wlock()
-                lock = repo.lock()
-                tr = repo.transaction(source)
-                tr.hookargs['source'] = source
-                tr.hookargs['url'] = url
-                tr.hookargs['bundle2'] = '1'
-                op = bundle2.bundleoperation(repo, lambda: tr,
+                def gettransaction():
+                    if not lockandtr[2]:
+                        lockandtr[0] = repo.wlock()
+                        lockandtr[1] = repo.lock()
+                        lockandtr[2] = repo.transaction(source)
+                        lockandtr[2].hookargs['source'] = source
+                        lockandtr[2].hookargs['url'] = url
+                        lockandtr[2].hookargs['bundle2'] = '1'
+                    return lockandtr[2]
+
+                # Do greedy locking by default until we're satisfied with lazy
+                # locking.
+                if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
+                    gettransaction()
+
+                op = bundle2.bundleoperation(repo, gettransaction,
                                              captureoutput=captureoutput)
                 try:
-                    r = bundle2.processbundle(repo, cg, op=op)
+                    op = bundle2.processbundle(repo, cg, op=op)
                 finally:
                     r = op.reply
                     if captureoutput and r is not None:
                         repo.ui.pushbuffer(error=True, subproc=True)
                         def recordout(output):
                             r.newpart('output', data=output, mandatory=False)
-                tr.close()
+                if lockandtr[2] is not None:
+                    lockandtr[2].close()
             except BaseException as exc:
                 exc.duringunbundle2 = True
                 if captureoutput and r is not None:
@@ -1437,138 +1638,213 @@
                         parts.append(part)
                 raise
         else:
-            lock = repo.lock()
-            r = changegroup.addchangegroup(repo, cg, source, url)
+            lockandtr[1] = repo.lock()
+            r = cg.apply(repo, source, url)
     finally:
-        lockmod.release(tr, lock, wlock)
+        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
         if recordout is not None:
             recordout(repo.ui.popbuffer())
     return r
 
-# This is it's own function so extensions can override it.
-def _walkstreamfiles(repo):
-    return repo.store.walk()
+def _maybeapplyclonebundle(pullop):
+    """Apply a clone bundle from a remote, if possible."""
+
+    repo = pullop.repo
+    remote = pullop.remote
+
+    if not repo.ui.configbool('experimental', 'clonebundles', False):
+        return
+
+    if pullop.heads:
+        return
+
+    if not remote.capable('clonebundles'):
+        return
+
+    res = remote._call('clonebundles')
+
+    # If we call the wire protocol command, that's good enough to record the
+    # attempt.
+    pullop.clonebundleattempted = True
+
+    entries = parseclonebundlesmanifest(repo, res)
+    if not entries:
+        repo.ui.note(_('no clone bundles available on remote; '
+                       'falling back to regular clone\n'))
+        return
 
-def generatestreamclone(repo):
-    """Emit content for a streaming clone.
+    entries = filterclonebundleentries(repo, entries)
+    if not entries:
+        # There is a thundering herd concern here. However, if a server
+        # operator doesn't advertise bundles appropriate for its clients,
+        # they deserve what's coming. Furthermore, from a client's
+        # perspective, no automatic fallback would mean not being able to
+        # clone!
+        repo.ui.warn(_('no compatible clone bundles available on server; '
+                       'falling back to regular clone\n'))
+        repo.ui.warn(_('(you may want to report this to the server '
+                       'operator)\n'))
+        return
+
+    entries = sortclonebundleentries(repo.ui, entries)
 
-    This is a generator of raw chunks that constitute a streaming clone.
+    url = entries[0]['URL']
+    repo.ui.status(_('applying clone bundle from %s\n') % url)
+    if trypullbundlefromurl(repo.ui, repo, url):
+        repo.ui.status(_('finished applying clone bundle\n'))
+    # Bundle failed.
+    #
+    # We abort by default to avoid the thundering herd of
+    # clients flooding a server that was expecting expensive
+    # clone load to be offloaded.
+    elif repo.ui.configbool('ui', 'clonebundlefallback', False):
+        repo.ui.warn(_('falling back to normal clone\n'))
+    else:
+        raise error.Abort(_('error applying bundle'),
+                          hint=_('if this error persists, consider contacting '
+                                 'the server operator or disable clone '
+                                 'bundles via '
+                                 '"--config experimental.clonebundles=false"'))
 
-    The stream begins with a line of 2 space-delimited integers containing the
-    number of entries and total bytes size.
+def parseclonebundlesmanifest(repo, s):
+    """Parses the raw text of a clone bundles manifest.
+
+    Returns a list of dicts. Each dict has a ``URL`` key corresponding to the
+    entry's URL; the remaining keys are that entry's attributes.
+    """
+    m = []
+    for line in s.splitlines():
+        fields = line.split()
+        if not fields:
+            continue
+        attrs = {'URL': fields[0]}
+        for rawattr in fields[1:]:
+            key, value = rawattr.split('=', 1)
+            key = urllib.unquote(key)
+            value = urllib.unquote(value)
+            attrs[key] = value
 
-    Next, are N entries for each file being transferred. Each file entry starts
-    as a line with the file name and integer size delimited by a null byte.
-    The raw file data follows. Following the raw file data is the next file
-    entry, or EOF.
+            # Parse BUNDLESPEC into components. This makes client-side
+            # preferences easier to specify since you can prefer a single
+            # component of the BUNDLESPEC.
+            if key == 'BUNDLESPEC':
+                try:
+                    comp, version, params = parsebundlespec(repo, value,
+                                                            externalnames=True)
+                    attrs['COMPRESSION'] = comp
+                    attrs['VERSION'] = version
+                except error.InvalidBundleSpecification:
+                    pass
+                except error.UnsupportedBundleSpecification:
+                    pass
+
+        m.append(attrs)
+
+    return m
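+
+As a concrete illustration of the manifest format parsed above, a line such as
+the following (hypothetical URL) yields one entry dict, with the BUNDLESPEC
+additionally expanded into COMPRESSION and VERSION keys:
+
+    https://hg.example.com/bundles/full.hg BUNDLESPEC=gzip-v2 REQUIRESNI=true
+
+    # parsed entry (sketch):
+    # {'URL': 'https://hg.example.com/bundles/full.hg',
+    #  'BUNDLESPEC': 'gzip-v2', 'COMPRESSION': 'gzip', 'VERSION': 'v2',
+    #  'REQUIRESNI': 'true'}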
+
+def filterclonebundleentries(repo, entries):
+    """Remove incompatible clone bundle manifest entries.
+
+    Accepts a list of entries parsed with ``parseclonebundlesmanifest``
+    and returns a new list consisting of only the entries that this client
+    should be able to apply.
+
+    There is no guarantee we'll be able to apply all returned entries because
+    the metadata we use to filter on may be missing or wrong.
+    """
+    newentries = []
+    for entry in entries:
+        spec = entry.get('BUNDLESPEC')
+        if spec:
+            try:
+                parsebundlespec(repo, spec, strict=True)
+            except error.InvalidBundleSpecification as e:
+                repo.ui.debug(str(e) + '\n')
+                continue
+            except error.UnsupportedBundleSpecification as e:
+                repo.ui.debug('filtering %s because unsupported bundle '
+                              'spec: %s\n' % (entry['URL'], str(e)))
+                continue
+
+        if 'REQUIRESNI' in entry and not sslutil.hassni:
+            repo.ui.debug('filtering %s because SNI not supported\n' %
+                          entry['URL'])
+            continue
+
+        newentries.append(entry)
 
-    When used on the wire protocol, an additional line indicating protocol
-    success will be prepended to the stream. This function is not responsible
-    for adding it.
+    return newentries
+
+def sortclonebundleentries(ui, entries):
+    # experimental config: experimental.clonebundleprefers
+    prefers = ui.configlist('experimental', 'clonebundleprefers', default=[])
+    if not prefers:
+        return list(entries)
+
+    prefers = [p.split('=', 1) for p in prefers]
+
+    # Our sort function.
+    def compareentry(a, b):
+        for prefkey, prefvalue in prefers:
+            avalue = a.get(prefkey)
+            bvalue = b.get(prefkey)
+
+            # Special case for b missing attribute and a matches exactly.
+            if avalue is not None and bvalue is None and avalue == prefvalue:
+                return -1
+
+            # Special case for a missing attribute and b matches exactly.
+            if bvalue is not None and avalue is None and bvalue == prefvalue:
+                return 1
 
-    This function will obtain a repository lock to ensure a consistent view of
-    the store is captured. It therefore may raise LockError.
-    """
-    entries = []
-    total_bytes = 0
-    # Get consistent snapshot of repo, lock during scan.
+            # We can't compare unless attribute present on both.
+            if avalue is None or bvalue is None:
+                continue
+
+            # Same values should fall back to next attribute.
+            if avalue == bvalue:
+                continue
+
+            # Exact matches come first.
+            if avalue == prefvalue:
+                return -1
+            if bvalue == prefvalue:
+                return 1
+
+            # Fall back to next attribute.
+            continue
+
+        # If we got here we couldn't sort by attributes and prefers. Fall
+        # back to index order.
+        return 0
+
+    return sorted(entries, cmp=compareentry)
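+
+The sort above is driven by an experimental, comma-separated list of
+attribute preferences; a hedged hgrc sketch (attribute names match the
+manifest keys shown earlier):
+
+    [experimental]
+    # prefer stream bundles, then gzip-compressed ones
+    clonebundleprefers = VERSION=packed1, COMPRESSION=gzip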
+
+def trypullbundlefromurl(ui, repo, url):
+    """Attempt to apply a bundle from a URL."""
     lock = repo.lock()
     try:
-        repo.ui.debug('scanning\n')
-        for name, ename, size in _walkstreamfiles(repo):
-            if size:
-                entries.append((name, size))
-                total_bytes += size
-    finally:
-            lock.release()
-
-    repo.ui.debug('%d files, %d bytes to transfer\n' %
-                  (len(entries), total_bytes))
-    yield '%d %d\n' % (len(entries), total_bytes)
-
-    svfs = repo.svfs
-    oldaudit = svfs.mustaudit
-    debugflag = repo.ui.debugflag
-    svfs.mustaudit = False
-
-    try:
-        for name, size in entries:
-            if debugflag:
-                repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
-            # partially encode name over the wire for backwards compat
-            yield '%s\0%d\n' % (store.encodedir(name), size)
-            if size <= 65536:
-                fp = svfs(name)
-                try:
-                    data = fp.read(size)
-                finally:
-                    fp.close()
-                yield data
-            else:
-                for chunk in util.filechunkiter(svfs(name), limit=size):
-                    yield chunk
-    finally:
-        svfs.mustaudit = oldaudit
-
-def consumestreamclone(repo, fp):
-    """Apply the contents from a streaming clone file.
-
-    This takes the output from "streamout" and applies it to the specified
-    repository.
+        tr = repo.transaction('bundleurl')
+        try:
+            try:
+                fh = urlmod.open(ui, url)
+                cg = readbundle(ui, fh, 'stream')
 
-    Like "streamout," the status line added by the wire protocol is not handled
-    by this function.
-    """
-    lock = repo.lock()
-    try:
-        repo.ui.status(_('streaming all changes\n'))
-        l = fp.readline()
-        try:
-            total_files, total_bytes = map(int, l.split(' ', 1))
-        except (ValueError, TypeError):
-            raise error.ResponseError(
-                _('unexpected response from remote server:'), l)
-        repo.ui.status(_('%d files to transfer, %s of data\n') %
-                       (total_files, util.bytecount(total_bytes)))
-        handled_bytes = 0
-        repo.ui.progress(_('clone'), 0, total=total_bytes)
-        start = time.time()
+                if isinstance(cg, bundle2.unbundle20):
+                    bundle2.processbundle(repo, cg, lambda: tr)
+                elif isinstance(cg, streamclone.streamcloneapplier):
+                    cg.apply(repo)
+                else:
+                    cg.apply(repo, 'clonebundles', url)
+                tr.close()
+                return True
+            except urllib2.HTTPError as e:
+                ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
+            except urllib2.URLError as e:
+                ui.warn(_('error fetching bundle: %s\n') % e.reason[1])
 
-        tr = repo.transaction(_('clone'))
-        try:
-            for i in xrange(total_files):
-                # XXX doesn't support '\n' or '\r' in filenames
-                l = fp.readline()
-                try:
-                    name, size = l.split('\0', 1)
-                    size = int(size)
-                except (ValueError, TypeError):
-                    raise error.ResponseError(
-                        _('unexpected response from remote server:'), l)
-                if repo.ui.debugflag:
-                    repo.ui.debug('adding %s (%s)\n' %
-                                  (name, util.bytecount(size)))
-                # for backwards compat, name was partially encoded
-                ofp = repo.svfs(store.decodedir(name), 'w')
-                for chunk in util.filechunkiter(fp, limit=size):
-                    handled_bytes += len(chunk)
-                    repo.ui.progress(_('clone'), handled_bytes,
-                                     total=total_bytes)
-                    ofp.write(chunk)
-                ofp.close()
-            tr.close()
+            return False
         finally:
             tr.release()
-
-        # Writing straight to files circumvented the inmemory caches
-        repo.invalidate()
-
-        elapsed = time.time() - start
-        if elapsed <= 0:
-            elapsed = 0.001
-        repo.ui.progress(_('clone'), None)
-        repo.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
-                       (util.bytecount(total_bytes), elapsed,
-                        util.bytecount(total_bytes / elapsed)))
     finally:
         lock.release()
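
On the client side, the clone bundle path above is gated by an experimental switch, with an opt-in fallback when a fetched bundle fails to apply; a minimal configuration sketch:

    [experimental]
    clonebundles = true

    [ui]
    # fall back to a normal clone instead of aborting if the bundle fails
    clonebundlefallback = true
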
--- a/mercurial/exewrapper.c	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/exewrapper.c	Tue Oct 20 15:59:10 2015 -0500
@@ -67,10 +67,19 @@
 	}
 
 	pydll = NULL;
+	/*
+	We first check that the environment variable PYTHONHOME is *not* set.
+	This just mimics the behavior of the regular python.exe, which uses
+	PYTHONHOME to find its installation directory (if it has been set).
+	Note: Users of HackableMercurial are expected to *not* set PYTHONHOME!
+	*/
 	if (GetEnvironmentVariable("PYTHONHOME", envpyhome,
 				   sizeof(envpyhome)) == 0)
 	{
-		/* environment var PYTHONHOME is not set */
+		/*
+		Environment var PYTHONHOME is *not* set. Let's see if we are
+		running inside a HackableMercurial.
+		*/
 
 		p = strrchr(pyhome, '\\');
 		if (p == NULL) {
@@ -90,7 +99,8 @@
 			strcat_s(pydllfile, sizeof(pydllfile), "\\" HGPYTHONLIB);
 			pydll = LoadLibrary(pydllfile);
 			if (pydll == NULL) {
-				err = "failed to load private Python DLL";
+				err = "failed to load private Python DLL "
+				      HGPYTHONLIB ".dll";
 				goto bail;
 			}
 			Py_SetPythonHome = (void*)GetProcAddress(pydll,
@@ -106,7 +116,7 @@
 	if (pydll == NULL) {
 		pydll = LoadLibrary(HGPYTHONLIB);
 		if (pydll == NULL) {
-			err = "failed to load Python DLL";
+			err = "failed to load Python DLL " HGPYTHONLIB ".dll";
 			goto bail;
 		}
 	}
--- a/mercurial/extensions.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/extensions.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,9 +5,21 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import imp, os
-import util, cmdutil, error
-from i18n import _, gettext
+from __future__ import absolute_import
+
+import imp
+import os
+
+from .i18n import (
+    _,
+    gettext,
+)
+
+from . import (
+    cmdutil,
+    error,
+    util,
+)
 
 _extensions = {}
 _aftercallbacks = {}
@@ -193,7 +205,7 @@
 
       The ``remotenames`` extension adds the ``--remote`` and ``--all`` (``-a``)
       flags to the bookmarks command. Either flag will show the remote bookmarks
-      known to the repository; ``--remote`` will also supress the output of the
+      known to the repository; ``--remote`` will also suppress the output of the
       local bookmarks.
       """
 
--- a/mercurial/fancyopts.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/fancyopts.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,9 +5,12 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+from __future__ import absolute_import
+
 import getopt
-import util
-from i18n import _
+
+from .i18n import _
+from . import error
 
 def gnugetopt(args, options, longoptions):
     """Parse options mostly like getopt.gnu_getopt.
@@ -111,7 +114,7 @@
             try:
                 state[name] = int(val)
             except ValueError:
-                raise util.Abort(_('invalid value %r for option %s, '
+                raise error.Abort(_('invalid value %r for option %s, '
                                    'expected int') % (val, opt))
         elif t is type(''):
             state[name] = val
--- a/mercurial/filelog.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/filelog.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,8 +5,16 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import error, mdiff, revlog
-import re, struct
+from __future__ import absolute_import
+
+import re
+import struct
+
+from . import (
+    error,
+    mdiff,
+    revlog,
+)
 
 _mdre = re.compile('\1\n')
 def parsemeta(text):
--- a/mercurial/filemerge.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/filemerge.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,11 +5,25 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from node import short
-from i18n import _
-import util, simplemerge, match, error, templater, templatekw
-import os, tempfile, re, filecmp
-import tagmerge
+from __future__ import absolute_import
+
+import filecmp
+import os
+import re
+import tempfile
+
+from .i18n import _
+from .node import short
+
+from . import (
+    error,
+    match,
+    simplemerge,
+    tagmerge,
+    templatekw,
+    templater,
+    util,
+)
 
 def _toolstr(ui, tool, part, default=""):
     return ui.config("merge-tools", tool + "." + part, default)
@@ -24,7 +38,12 @@
 # Merge tools to document.
 internalsdoc = {}
 
-def internaltool(name, trymerge, onfailure=None):
+# internal tool merge types
+nomerge = None
+mergeonly = 'mergeonly'  # just the full merge, no premerge
+fullmerge = 'fullmerge'  # both premerge and merge
+
+def internaltool(name, mergetype, onfailure=None, precheck=None):
     '''return a decorator for populating internal merge tool table'''
     def decorator(func):
         fullname = ':' + name
@@ -32,8 +51,9 @@
         internals[fullname] = func
         internals['internal:' + name] = func
         internalsdoc[fullname] = func
-        func.trymerge = trymerge
+        func.mergetype = mergetype
         func.onfailure = onfailure
+        func.precheck = precheck
         return func
     return decorator
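+
+To illustrate the new registration scheme, a hypothetical no-merge tool would
+now declare its merge type instead of the old ``trymerge`` boolean (sketch
+only, not part of this changeset):
+
+    @internaltool('keep-local', nomerge)
+    def _ikeeplocal(repo, mynode, orig, fcd, fco, fca, toolconf):
+        """Always keep the local version (illustration only)."""
+        return 0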
 
@@ -100,12 +120,15 @@
 
     # then merge tools
     tools = {}
+    disabled = set()
     for k, v in ui.configitems("merge-tools"):
         t = k.split('.')[0]
         if t not in tools:
             tools[t] = int(_toolstr(ui, t, "priority", "0"))
+        if _toolbool(ui, t, "disabled", False):
+            disabled.add(t)
     names = tools.keys()
-    tools = sorted([(-p, t) for t, p in tools.items()])
+    tools = sorted([(-p, t) for t, p in tools.items() if t not in disabled])
     uimerge = ui.config("ui", "merge")
     if uimerge:
         if uimerge not in names:
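
The new per-tool ``disabled`` flag keeps a configured tool out of automatic selection without removing its other settings; for example, with a hypothetical external tool:

    [merge-tools]
    mymergetool.priority = 100
    mymergetool.disabled = true
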
@@ -145,7 +168,7 @@
             if newdata != data:
                 util.writefile(file, newdata)
 
-@internaltool('prompt', False)
+@internaltool('prompt', nomerge)
 def _iprompt(repo, mynode, orig, fcd, fco, fca, toolconf):
     """Asks the user which of the local or the other version to keep as
     the merged version."""
@@ -159,18 +182,18 @@
     else:
         return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf)
 
-@internaltool('local', False)
+@internaltool('local', nomerge)
 def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf):
     """Uses the local version of files as the merged version."""
     return 0
 
-@internaltool('other', False)
+@internaltool('other', nomerge)
 def _iother(repo, mynode, orig, fcd, fco, fca, toolconf):
     """Uses the other version of files as the merged version."""
     repo.wwrite(fcd.path(), fco.data(), fco.flags())
     return 0
 
-@internaltool('fail', False)
+@internaltool('fail', nomerge)
 def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf):
     """
     Rather than attempting to merge files that were modified on both
@@ -213,33 +236,56 @@
             util.copyfile(back, a) # restore from backup and try again
     return 1 # continue merging
 
-@internaltool('merge', True,
-              _("merging %s incomplete! "
-                "(edit conflicts, then use 'hg resolve --mark')\n"))
+def _symlinkcheck(repo, mynode, orig, fcd, fco, fca, toolconf):
+    tool, toolpath, binary, symlink = toolconf
+    if symlink:
+        repo.ui.warn(_('warning: internal %s cannot merge symlinks '
+                       'for %s\n') % (tool, fcd.path()))
+        return False
+    return True
+
+def _merge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, mode):
+    """
+    Uses the internal non-interactive simple merge algorithm for merging
+    files. It will fail if there are any conflicts and leave markers in
+    the partially merged file. Markers will have two sections, one for each side
+    of the merge, unless mode equals 'union', which suppresses the markers."""
+    a, b, c, back = files
+
+    ui = repo.ui
+
+    r = simplemerge.simplemerge(ui, a, b, c, label=labels, mode=mode)
+    return True, r
+
+@internaltool('union', fullmerge,
+              _("warning: conflicts while merging %s! "
+                "(edit, then use 'hg resolve --mark')\n"),
+              precheck=_symlinkcheck)
+def _iunion(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
+    """
+    Uses the internal non-interactive simple merge algorithm for merging
+    files. It will use both left and right sides for conflict regions.
+    No markers are inserted."""
+    return _merge(repo, mynode, orig, fcd, fco, fca, toolconf,
+                  files, labels, 'union')
+
+@internaltool('merge', fullmerge,
+              _("warning: conflicts while merging %s! "
+                "(edit, then use 'hg resolve --mark')\n"),
+              precheck=_symlinkcheck)
 def _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
     """
     Uses the internal non-interactive simple merge algorithm for merging
     files. It will fail if there are any conflicts and leave markers in
     the partially merged file. Markers will have two sections, one for each side
     of merge."""
-    tool, toolpath, binary, symlink = toolconf
-    if symlink:
-        repo.ui.warn(_('warning: internal :merge cannot merge symlinks '
-                       'for %s\n') % fcd.path())
-        return False, 1
-    r = _premerge(repo, toolconf, files, labels=labels)
-    if r:
-        a, b, c, back = files
+    return _merge(repo, mynode, orig, fcd, fco, fca, toolconf,
+                  files, labels, 'merge')
 
-        ui = repo.ui
-
-        r = simplemerge.simplemerge(ui, a, b, c, label=labels)
-        return True, r
-    return False, 0
-
-@internaltool('merge3', True,
-              _("merging %s incomplete! "
-                "(edit conflicts, then use 'hg resolve --mark')\n"))
+@internaltool('merge3', fullmerge,
+              _("warning: conflicts while merging %s! "
+                "(edit, then use 'hg resolve --mark')\n"),
+              precheck=_symlinkcheck)
 def _imerge3(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
     """
     Uses the internal non-interactive simple merge algorithm for merging
@@ -252,7 +298,39 @@
         labels.append('base')
     return _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels)
 
-@internaltool('tagmerge', True,
+def _imergeauto(repo, mynode, orig, fcd, fco, fca, toolconf, files,
+                labels=None, localorother=None):
+    """
+    Generic driver for _imergelocal and _imergeother
+    """
+    assert localorother is not None
+    tool, toolpath, binary, symlink = toolconf
+    if symlink:
+        repo.ui.warn(_('warning: :merge-%s cannot merge symlinks '
+                       'for %s\n') % (localorother, fcd.path()))
+        return False, 1
+    a, b, c, back = files
+    r = simplemerge.simplemerge(repo.ui, a, b, c, label=labels,
+                                localorother=localorother)
+    return True, r
+
+@internaltool('merge-local', mergeonly)
+def _imergelocal(*args, **kwargs):
+    """
+    Like :merge, but resolve all conflicts non-interactively in favor
+    of the local changes."""
+    success, status = _imergeauto(localorother='local', *args, **kwargs)
+    return success, status
+
+@internaltool('merge-other', mergeonly)
+def _imergeother(*args, **kwargs):
+    """
+    Like :merge, but resolve all conflicts non-interactively in favor
+    of the other changes."""
+    success, status = _imergeauto(localorother='other', *args, **kwargs)
+    return success, status
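+
+The internal tools added above (:union, :merge-local, :merge-other) are
+selected like any other merge tool; illustrative invocations (file name
+hypothetical):
+
+    $ hg resolve --tool :union conflicted.txt
+
+    # or, in an hgrc:
+    [ui]
+    merge = :merge-other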
+
+@internaltool('tagmerge', mergeonly,
               _("automatic tag merging of %s failed! "
                 "(use 'hg resolve --tool :merge' or another merge "
                 "tool of your choice)\n"))
@@ -262,7 +340,7 @@
     """
     return tagmerge.merge(repo, fcd, fco, fca)
 
-@internaltool('dump', True)
+@internaltool('dump', fullmerge)
 def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
     """
     Creates three versions of the files to merge, containing the
@@ -271,46 +349,41 @@
     ``a.txt``, these files will accordingly be named ``a.txt.local``,
     ``a.txt.other`` and ``a.txt.base`` and they will be placed in the
     same directory as ``a.txt``."""
-    r = _premerge(repo, toolconf, files, labels=labels)
-    if r:
-        a, b, c, back = files
+    a, b, c, back = files
+
+    fd = fcd.path()
 
-        fd = fcd.path()
-
-        util.copyfile(a, a + ".local")
-        repo.wwrite(fd + ".other", fco.data(), fco.flags())
-        repo.wwrite(fd + ".base", fca.data(), fca.flags())
-    return False, r
+    util.copyfile(a, a + ".local")
+    repo.wwrite(fd + ".other", fco.data(), fco.flags())
+    repo.wwrite(fd + ".base", fca.data(), fca.flags())
+    return False, 1
 
 def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
-    r = _premerge(repo, toolconf, files, labels=labels)
-    if r:
-        tool, toolpath, binary, symlink = toolconf
-        a, b, c, back = files
-        out = ""
-        env = {'HG_FILE': fcd.path(),
-               'HG_MY_NODE': short(mynode),
-               'HG_OTHER_NODE': str(fco.changectx()),
-               'HG_BASE_NODE': str(fca.changectx()),
-               'HG_MY_ISLINK': 'l' in fcd.flags(),
-               'HG_OTHER_ISLINK': 'l' in fco.flags(),
-               'HG_BASE_ISLINK': 'l' in fca.flags(),
-               }
+    tool, toolpath, binary, symlink = toolconf
+    a, b, c, back = files
+    out = ""
+    env = {'HG_FILE': fcd.path(),
+           'HG_MY_NODE': short(mynode),
+           'HG_OTHER_NODE': str(fco.changectx()),
+           'HG_BASE_NODE': str(fca.changectx()),
+           'HG_MY_ISLINK': 'l' in fcd.flags(),
+           'HG_OTHER_ISLINK': 'l' in fco.flags(),
+           'HG_BASE_ISLINK': 'l' in fca.flags(),
+           }
 
-        ui = repo.ui
+    ui = repo.ui
 
-        args = _toolstr(ui, tool, "args", '$local $base $other')
-        if "$output" in args:
-            out, a = a, back # read input from backup, write to original
-        replace = {'local': a, 'base': b, 'other': c, 'output': out}
-        args = util.interpolate(r'\$', replace, args,
-                                lambda s: util.shellquote(util.localpath(s)))
-        cmd = toolpath + ' ' + args
-        repo.ui.debug('launching merge tool: %s\n' % cmd)
-        r = ui.system(cmd, cwd=repo.root, environ=env)
-        repo.ui.debug('merge tool returned: %s\n' % r)
-        return True, r
-    return False, 0
+    args = _toolstr(ui, tool, "args", '$local $base $other')
+    if "$output" in args:
+        out, a = a, back # read input from backup, write to original
+    replace = {'local': a, 'base': b, 'other': c, 'output': out}
+    args = util.interpolate(r'\$', replace, args,
+                            lambda s: util.shellquote(util.localpath(s)))
+    cmd = toolpath + ' ' + args
+    repo.ui.debug('launching merge tool: %s\n' % cmd)
+    r = ui.system(cmd, cwd=repo.root, environ=env)
+    repo.ui.debug('merge tool returned: %s\n' % r)
+    return True, r
 
 def _formatconflictmarker(repo, ctx, template, label, pad):
     """Applies the given template to the ctx, prefixed by the label.
@@ -365,14 +438,17 @@
         newlabels.append(_formatconflictmarker(repo, ca, tmpl, labels[2], pad))
     return newlabels
 
-def filemerge(repo, mynode, orig, fcd, fco, fca, labels=None):
+def _filemerge(premerge, repo, mynode, orig, fcd, fco, fca, labels=None):
     """perform a 3-way merge in the working directory
 
+    premerge = whether this is a premerge
     mynode = parent node before merge
     orig = original local filename before merge
     fco = other file context
     fca = ancestor file context
     fcd = local file context for current/destination file
+
+    Returns whether the merge is complete, and the return value of the merge.
     """
 
     def temp(prefix, ctx):
@@ -385,61 +461,89 @@
         return name
 
     if not fco.cmp(fcd): # files identical?
-        return None
+        return True, None
 
     ui = repo.ui
     fd = fcd.path()
     binary = fcd.isbinary() or fco.isbinary() or fca.isbinary()
     symlink = 'l' in fcd.flags() + fco.flags()
     tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
+    if tool in internals and tool.startswith('internal:'):
+        # normalize to new-style names (':merge' etc)
+        tool = tool[len('internal'):]
     ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" %
                (tool, fd, binary, symlink))
 
     if tool in internals:
         func = internals[tool]
-        trymerge = func.trymerge
+        mergetype = func.mergetype
         onfailure = func.onfailure
+        precheck = func.precheck
     else:
         func = _xmerge
-        trymerge = True
+        mergetype = fullmerge
         onfailure = _("merging %s failed!\n")
+        precheck = None
 
     toolconf = tool, toolpath, binary, symlink
 
-    if not trymerge:
-        return func(repo, mynode, orig, fcd, fco, fca, toolconf)
+    if mergetype == nomerge:
+        return True, func(repo, mynode, orig, fcd, fco, fca, toolconf)
+
+    if premerge:
+        if orig != fco.path():
+            ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
+        else:
+            ui.status(_("merging %s\n") % fd)
+
+    ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
+
+    if precheck and not precheck(repo, mynode, orig, fcd, fco, fca,
+                                 toolconf):
+        if onfailure:
+            ui.warn(onfailure % fd)
+        return True, 1
 
     a = repo.wjoin(fd)
     b = temp("base", fca)
     c = temp("other", fco)
     back = a + ".orig"
-    util.copyfile(a, back)
-
-    if orig != fco.path():
-        ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
-    else:
-        ui.status(_("merging %s\n") % fd)
-
-    ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
+    if premerge:
+        util.copyfile(a, back)
+    files = (a, b, c, back)
 
-    markerstyle = ui.config('ui', 'mergemarkers', 'basic')
-    if not labels:
-        labels = _defaultconflictlabels
-    if markerstyle != 'basic':
-        labels = _formatlabels(repo, fcd, fco, fca, labels)
+    r = 1
+    try:
+        markerstyle = ui.config('ui', 'mergemarkers', 'basic')
+        if not labels:
+            labels = _defaultconflictlabels
+        if markerstyle != 'basic':
+            labels = _formatlabels(repo, fcd, fco, fca, labels)
 
-    needcheck, r = func(repo, mynode, orig, fcd, fco, fca, toolconf,
-                        (a, b, c, back), labels=labels)
-    if not needcheck:
+        if premerge and mergetype == fullmerge:
+            r = _premerge(repo, toolconf, files, labels=labels)
+            # complete if premerge successful (r is 0)
+            return not r, r
+
+        needcheck, r = func(repo, mynode, orig, fcd, fco, fca, toolconf, files,
+                            labels=labels)
+        if needcheck:
+            r = _check(r, ui, tool, fcd, files)
+
         if r:
             if onfailure:
                 ui.warn(onfailure % fd)
-        else:
+
+        return True, r
+    finally:
+        if not r:
             util.unlink(back)
-
         util.unlink(b)
         util.unlink(c)
-        return r
+
+def _check(r, ui, tool, fcd, files):
+    fd = fcd.path()
+    a, b, c, back = files
 
     if not r and (_toolbool(ui, tool, "checkconflicts") or
                   'conflicts' in _toollist(ui, tool, "check")):
@@ -455,7 +559,8 @@
             r = 1
 
     if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
-                                  'changed' in _toollist(ui, tool, "check")):
+                                  'changed' in
+                                  _toollist(ui, tool, "check")):
         if filecmp.cmp(a, back):
             if ui.promptchoice(_(" output file %s appears unchanged\n"
                                  "was merge successful (yn)?"
@@ -465,15 +570,13 @@
     if _toolbool(ui, tool, "fixeol"):
         _matcheol(a, back)
 
-    if r:
-        if onfailure:
-            ui.warn(onfailure % fd)
-    else:
-        util.unlink(back)
+    return r
 
-    util.unlink(b)
-    util.unlink(c)
-    return r
+def premerge(repo, mynode, orig, fcd, fco, fca, labels=None):
+    return _filemerge(True, repo, mynode, orig, fcd, fco, fca, labels=labels)
+
+def filemerge(repo, mynode, orig, fcd, fco, fca, labels=None):
+    return _filemerge(False, repo, mynode, orig, fcd, fco, fca, labels=labels)
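A short sketch of how a caller is expected to drive the new two-step API; the
driver function below is hypothetical (not part of this changeset), and the
repo and file-context arguments are assumed to come from merge.py's merge
state:

    def _runfilemerge(repo, mynode, orig, fcd, fco, fca, labels=None):
        # try the cheap, non-interactive premerge first ...
        complete, r = premerge(repo, mynode, orig, fcd, fco, fca, labels=labels)
        if not complete:
            # ... and only invoke the real merge tool if premerge did not finish
            complete, r = filemerge(repo, mynode, orig, fcd, fco, fca,
                                    labels=labels)
        return r  # 0 means cleanly merged, non-zero means unresolved conflicts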
 
 # tell hggettext to extract docstrings from these functions:
 i18nfunctions = internals.values()
--- a/mercurial/fileset.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/fileset.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,9 +5,17 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+from __future__ import absolute_import
+
 import re
-import parser, error, util, merge
-from i18n import _
+
+from .i18n import _
+from . import (
+    error,
+    merge,
+    parser,
+    util,
+)
 
 elements = {
     # token-type: binding-strength, primary, prefix, infix, suffix
@@ -46,7 +54,7 @@
                 c = program[pos]
                 decode = lambda x: x
             else:
-                decode = lambda x: x.decode('string-escape')
+                decode = parser.unescapestr
             pos += 1
             s = pos
             while pos < l: # find closing quote
@@ -124,7 +132,7 @@
 
 def modified(mctx, x):
     """``modified()``
-    File that is modified according to status.
+    File that is modified according to :hg:`status`.
     """
     # i18n: "modified" is a keyword
     getargs(x, 0, 0, _("modified takes no arguments"))
@@ -133,7 +141,7 @@
 
 def added(mctx, x):
     """``added()``
-    File that is added according to status.
+    File that is added according to :hg:`status`.
     """
     # i18n: "added" is a keyword
     getargs(x, 0, 0, _("added takes no arguments"))
@@ -142,7 +150,7 @@
 
 def removed(mctx, x):
     """``removed()``
-    File that is removed according to status.
+    File that is removed according to :hg:`status`.
     """
     # i18n: "removed" is a keyword
     getargs(x, 0, 0, _("removed takes no arguments"))
@@ -151,7 +159,7 @@
 
 def deleted(mctx, x):
     """``deleted()``
-    File that is deleted according to status.
+    File that is deleted according to :hg:`status`.
     """
     # i18n: "deleted" is a keyword
     getargs(x, 0, 0, _("deleted takes no arguments"))
@@ -160,7 +168,7 @@
 
 def unknown(mctx, x):
     """``unknown()``
-    File that is unknown according to status. These files will only be
+    File that is unknown according to :hg:`status`. These files will only be
     considered if this predicate is used.
     """
     # i18n: "unknown" is a keyword
@@ -170,7 +178,7 @@
 
 def ignored(mctx, x):
     """``ignored()``
-    File that is ignored according to status. These files will only be
+    File that is ignored according to :hg:`status`. These files will only be
     considered if this predicate is used.
     """
     # i18n: "ignored" is a keyword
@@ -180,7 +188,7 @@
 
 def clean(mctx, x):
     """``clean()``
-    File that is clean according to status.
+    File that is clean according to :hg:`status`.
     """
     # i18n: "clean" is a keyword
     getargs(x, 0, 0, _("clean takes no arguments"))
@@ -235,7 +243,7 @@
 
 def resolved(mctx, x):
     """``resolved()``
-    File that is marked resolved according to the resolve state.
+    File that is marked resolved according to :hg:`resolve -l`.
     """
     # i18n: "resolved" is a keyword
     getargs(x, 0, 0, _("resolved takes no arguments"))
@@ -246,7 +254,7 @@
 
 def unresolved(mctx, x):
     """``unresolved()``
-    File that is marked unresolved according to the resolve state.
+    File that is marked unresolved according to :hg:`resolve -l`.
     """
     # i18n: "unresolved" is a keyword
     getargs(x, 0, 0, _("unresolved takes no arguments"))
@@ -355,7 +363,7 @@
         try:
             d.decode(enc)
         except LookupError:
-            raise util.Abort(_("unknown encoding '%s'") % enc)
+            raise error.Abort(_("unknown encoding '%s'") % enc)
         except UnicodeDecodeError:
             continue
         s.append(f)
@@ -410,7 +418,7 @@
         # i18n: "subrepo" is a keyword
         pat = getstring(x, _("subrepo requires a pattern or no arguments"))
 
-        import match as matchmod # avoid circular import issues
+        from . import match as matchmod # avoid circular import issues
         fast = not matchmod.patkind(pat)
         if fast:
             def m(s):
--- a/mercurial/formatter.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/formatter.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,13 +5,23 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+from __future__ import absolute_import
+
 import cPickle
-from node import hex, short
-from i18n import _
-import encoding, util
-import templater
 import os
 
+from .i18n import _
+from .node import (
+    hex,
+    short,
+)
+
+from . import (
+    encoding,
+    error,
+    templater,
+)
+
 class baseformatter(object):
     def __init__(self, ui, topic, opts):
         self._ui = ui
@@ -38,12 +48,14 @@
         self._item.update(data)
     def write(self, fields, deftext, *fielddata, **opts):
         '''do default text output while assigning data to item'''
-        for k, v in zip(fields.split(), fielddata):
-            self._item[k] = v
+        fieldkeys = fields.split()
+        assert len(fieldkeys) == len(fielddata)
+        self._item.update(zip(fieldkeys, fielddata))
     def condwrite(self, cond, fields, deftext, *fielddata, **opts):
         '''do conditional write (primarily for plain formatter)'''
-        for k, v in zip(fields.split(), fielddata):
-            self._item[k] = v
+        fieldkeys = fields.split()
+        assert len(fieldkeys) == len(fielddata)
+        self._item.update(zip(fieldkeys, fielddata))
     def plain(self, text, **opts):
         '''show raw text for non-templated mode'''
         pass
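A standalone sketch of the tightened contract above, with placeholder field
names and values: the space-separated field list must now line up one-to-one
with the data tuple instead of silently dropping extras.

    fields, fielddata = 'node desc', ('0123456789ab', 'fix a bug')
    fieldkeys = fields.split()
    assert len(fieldkeys) == len(fielddata)
    item = dict(zip(fieldkeys, fielddata))  # {'node': '0123456789ab', 'desc': 'fix a bug'}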
@@ -167,7 +179,7 @@
 
     if tmpl == 'list':
         ui.write(_("available styles: %s\n") % templater.stylelist())
-        raise util.Abort(_("specify a template"))
+        raise error.Abort(_("specify a template"))
 
     # perhaps it's a path to a map or a template
     if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
--- a/mercurial/graphmod.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/graphmod.py	Tue Oct 20 15:59:10 2015 -0500
@@ -17,11 +17,16 @@
 Data depends on type.
 """
 
-from mercurial.node import nullrev
-import util
+from __future__ import absolute_import
 
 import heapq
 
+from .node import nullrev
+from . import (
+    revset,
+    util,
+)
+
 CHANGESET = 'C'
 
 def groupbranchiter(revs, parentsfunc, firstbranch=()):
@@ -233,8 +238,6 @@
     if not revs:
         return
 
-    cl = repo.changelog
-    lowestrev = revs.min()
     gpcache = {}
 
     if repo.ui.configbool('experimental', 'graph-group-branches', False):
@@ -244,7 +247,8 @@
         if firstbranchrevset:
             firstbranch = repo.revs(firstbranchrevset)
         parentrevs = repo.changelog.parentrevs
-        revs = list(groupbranchiter(revs, parentrevs, firstbranch))
+        revs = groupbranchiter(revs, parentrevs, firstbranch)
+        revs = revset.baseset(revs)
 
     for rev in revs:
         ctx = repo[rev]
@@ -256,7 +260,11 @@
         for mpar in mpars:
             gp = gpcache.get(mpar)
             if gp is None:
-                gp = gpcache[mpar] = grandparent(cl, lowestrev, revs, mpar)
+                # precompute slow query as we know reachableroots() goes
+                # through all revs (issue4782)
+                if not isinstance(revs, revset.baseset):
+                    revs = revset.baseset(revs)
+                gp = gpcache[mpar] = revset.reachableroots(repo, revs, [mpar])
             if not gp:
                 parents.append(mpar)
             else:
@@ -354,24 +362,6 @@
         yield (cur, type, data, (col, color), edges)
         seen = next
 
-def grandparent(cl, lowestrev, roots, head):
-    """Return all ancestors of head in roots which revision is
-    greater or equal to lowestrev.
-    """
-    pending = set([head])
-    seen = set()
-    kept = set()
-    llowestrev = max(nullrev, lowestrev)
-    while pending:
-        r = pending.pop()
-        if r >= llowestrev and r not in seen:
-            if r in roots:
-                kept.add(r)
-            else:
-                pending.update([p for p in cl.parentrevs(r)])
-            seen.add(r)
-    return sorted(kept)
-
 def asciiedges(type, char, lines, seen, rev, parents):
     """adds edge info to changelog DAG walk suitable for ascii()"""
     if rev not in seen:
--- a/mercurial/hbisect.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/hbisect.py	Tue Oct 20 15:59:10 2015 -0500
@@ -8,12 +8,19 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+from __future__ import absolute_import
+
 import collections
 import os
-import error
-from i18n import _
-from node import short, hex
-import util
+
+from .i18n import _
+from .node import (
+    hex,
+    short,
+)
+from . import (
+    error,
+)
 
 def bisect(changelog, state):
     """find the next node (if any) for testing during a bisect search.
@@ -66,8 +73,8 @@
     if not ancestors: # now we're confused
         if (len(state['bad']) == 1 and len(state['good']) == 1 and
             state['bad'] != state['good']):
-            raise util.Abort(_("starting revisions are not directly related"))
-        raise util.Abort(_("inconsistent state, %s:%s is good and bad")
+            raise error.Abort(_("starting revisions are not directly related"))
+        raise error.Abort(_("inconsistent state, %s:%s is good and bad")
                          % (badrev, short(bad)))
 
     # build children dict
@@ -141,7 +148,7 @@
             kind, node = l[:-1].split()
             node = repo.lookup(node)
             if kind not in state:
-                raise util.Abort(_("unknown bisect kind %s") % kind)
+                raise error.Abort(_("unknown bisect kind %s") % kind)
             state[kind].append(node)
     return state
 
--- a/mercurial/help.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/help.py	Tue Oct 20 15:59:10 2015 -0500
@@ -14,19 +14,28 @@
 import cmdutil
 import hgweb.webcommands as webcommands
 
+_exclkeywords = [
+    "(DEPRECATED)",
+    "(EXPERIMENTAL)",
+    # i18n: "(DEPRECATED)" is a keyword, must be translated consistently
+    _("(DEPRECATED)"),
+    # i18n: "(EXPERIMENTAL)" is a keyword, must be translated consistently
+    _("(EXPERIMENTAL)"),
+    ]
+
 def listexts(header, exts, indent=1, showdeprecated=False):
     '''return a text listing of the given extensions'''
     rst = []
     if exts:
         rst.append('\n%s\n\n' % header)
         for name, desc in sorted(exts.iteritems()):
-            if '(DEPRECATED)' in desc and not showdeprecated:
+            if not showdeprecated and any(w in desc for w in _exclkeywords):
                 continue
             rst.append('%s:%s: %s\n' % (' ' * indent, name, desc))
     return rst
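A standalone illustration of the new keyword filter, with i18n left out and
hypothetical extension descriptions:

    _exclkeywords = ["(DEPRECATED)", "(EXPERIMENTAL)"]
    descs = {'color': 'colorize output from some commands',
             'magic': 'rewrite history in place (EXPERIMENTAL)'}
    # entries whose description carries an excluded keyword are hidden by default
    visible = [name for name, desc in sorted(descs.items())
               if not any(w in desc for w in _exclkeywords)]
    print(visible)  # ['color']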
 
-def extshelp():
-    rst = loaddoc('extensions')().splitlines(True)
+def extshelp(ui):
+    rst = loaddoc('extensions')(ui).splitlines(True)
     rst.extend(listexts(
         _('enabled extensions:'), extensions.enabled(), showdeprecated=True))
     rst.extend(listexts(_('disabled extensions:'), extensions.disabled()))
@@ -43,9 +52,7 @@
             shortopt, longopt, default, desc = option
             optlabel = _("VALUE") # default label
 
-        if not verbose and ("DEPRECATED" in desc or _("DEPRECATED") in desc or
-                            "EXPERIMENTAL" in desc or
-                            _("EXPERIMENTAL") in desc):
+        if not verbose and any(w in desc for w in _exclkeywords):
             continue
 
         so = ''
@@ -76,7 +83,7 @@
     if notomitted:
         rst.append('\n\n.. container:: notomitted\n\n    %s\n\n' % notomitted)
 
-def topicmatch(kw):
+def topicmatch(ui, kw):
     """Return help topics matching kw.
 
     Returns {'section': [(name, summary), ...], ...} where section is
@@ -94,7 +101,7 @@
         # Old extensions may use a str as doc.
         if (sum(map(lowercontains, names))
             or lowercontains(header)
-            or (callable(doc) and lowercontains(doc()))):
+            or (callable(doc) and lowercontains(doc(ui)))):
             results['topics'].append((names[0], header))
     import commands # avoid cycle
     for cmd, entry in commands.table.iteritems():
@@ -132,12 +139,12 @@
 def loaddoc(topic):
     """Return a delayed loader for help/topic.txt."""
 
-    def loader():
+    def loader(ui):
         docdir = os.path.join(util.datapath, 'help')
         path = os.path.join(docdir, topic + ".txt")
         doc = gettext(util.readfile(path))
         for rewriter in helphooks.get(topic, []):
-            doc = rewriter(topic, doc)
+            doc = rewriter(ui, topic, doc)
         return doc
 
     return loader
@@ -177,14 +184,15 @@
 def addtopichook(topic, rewriter):
     helphooks.setdefault(topic, []).append(rewriter)
 
-def makeitemsdoc(topic, doc, marker, items, dedent=False):
+def makeitemsdoc(ui, topic, doc, marker, items, dedent=False):
     """Extract docstring from the items key to function mapping, build a
-    .single documentation block and use it to overwrite the marker in doc
+    single documentation block and use it to overwrite the marker in doc.
     """
     entries = []
     for name in sorted(items):
         text = (items[name].__doc__ or '').rstrip()
-        if not text:
+        if (not text
+            or not ui.verbose and any(w in text for w in _exclkeywords)):
             continue
         text = gettext(text)
         if dedent:
@@ -204,15 +212,15 @@
     return doc.replace(marker, entries)
 
 def addtopicsymbols(topic, marker, symbols, dedent=False):
-    def add(topic, doc):
-        return makeitemsdoc(topic, doc, marker, symbols, dedent=dedent)
+    def add(ui, topic, doc):
+        return makeitemsdoc(ui, topic, doc, marker, symbols, dedent=dedent)
     addtopichook(topic, add)
 
 addtopicsymbols('filesets', '.. predicatesmarker', fileset.symbols)
 addtopicsymbols('merge-tools', '.. internaltoolsmarker',
                 filemerge.internalsdoc)
 addtopicsymbols('revsets', '.. predicatesmarker', revset.symbols)
-addtopicsymbols('templates', '.. keywordsmarker', templatekw.dockeywords)
+addtopicsymbols('templates', '.. keywordsmarker', templatekw.keywords)
 addtopicsymbols('templates', '.. filtersmarker', templatefilters.filters)
 addtopicsymbols('templates', '.. functionsmarker', templater.funcs)
 addtopicsymbols('hgweb', '.. webcommandsmarker', webcommands.commands,
@@ -334,7 +342,7 @@
             if not ui.debugflag and f.startswith("debug") and name != "debug":
                 continue
             doc = e[0].__doc__
-            if doc and 'DEPRECATED' in doc and not ui.verbose:
+            if not ui.verbose and doc and any(w in doc for w in _exclkeywords):
                 continue
             doc = gettext(doc)
             if not doc:
@@ -408,7 +416,7 @@
         if not doc:
             rst.append("    %s\n" % _("(no help text available)"))
         if callable(doc):
-            rst += ["    %s\n" % l for l in doc().splitlines()]
+            rst += ["    %s\n" % l for l in doc(ui).splitlines()]
 
         if not ui.verbose:
             omitted = _('(some details hidden, use --verbose'
@@ -475,11 +483,18 @@
     rst = []
     kw = opts.get('keyword')
     if kw:
-        matches = topicmatch(kw)
-        for t, title in (('topics', _('Topics')),
+        matches = topicmatch(ui, name)
+        helpareas = []
+        if opts.get('extension'):
+            helpareas += [('extensions', _('Extensions'))]
+        if opts.get('command'):
+            helpareas += [('commands', _('Commands'))]
+        if not helpareas:
+            helpareas = [('topics', _('Topics')),
                          ('commands', _('Commands')),
                          ('extensions', _('Extensions')),
-                         ('extensioncommands', _('Extension Commands'))):
+                         ('extensioncommands', _('Extension Commands'))]
+        for t, title in helpareas:
             if matches[t]:
                 rst.append('%s:\n\n' % title)
                 rst.extend(minirst.maketable(sorted(matches[t]), 1))
@@ -487,15 +502,16 @@
         if not rst:
             msg = _('no matches')
             hint = _('try "hg help" for a list of topics')
-            raise util.Abort(msg, hint=hint)
+            raise error.Abort(msg, hint=hint)
     elif name and name != 'shortlist':
+        queries = []
         if unknowncmd:
-            queries = (helpextcmd,)
-        elif opts.get('extension'):
-            queries = (helpext,)
-        elif opts.get('command'):
-            queries = (helpcmd,)
-        else:
+            queries += [helpextcmd]
+        if opts.get('extension'):
+            queries += [helpext]
+        if opts.get('command'):
+            queries += [helpcmd]
+        if not queries:
             queries = (helptopic, helpcmd, helpext, helpextcmd)
         for f in queries:
             try:
@@ -509,7 +525,7 @@
             else:
                 msg = _('no such help topic: %s') % name
                 hint = _('try "hg help --keyword %s"') % name
-                raise util.Abort(msg, hint=hint)
+                raise error.Abort(msg, hint=hint)
     else:
         # program name
         if not ui.quiet:
--- a/mercurial/help/config.txt	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/help/config.txt	Tue Oct 20 15:59:10 2015 -0500
@@ -1,6 +1,19 @@
 The Mercurial system uses a set of configuration files to control
 aspects of its behavior.
 
+Troubleshooting
+===============
+
+If you're having problems with your configuration,
+:hg:`config --debug` can help you understand what is introducing
+a setting into your environment.
+
+See :hg:`help config.syntax` and :hg:`help config.files`
+for information about how and where to override things.
+
+Format
+======
+
 The configuration files use a simple ini-file format. A configuration
 file consists of sections, led by a ``[section]`` header and followed
 by ``name = value`` entries::
@@ -10,7 +23,7 @@
   verbose = True
 
 The above entries will be referred to as ``ui.username`` and
-``ui.verbose``, respectively. See the Syntax section below.
+``ui.verbose``, respectively. See :hg:`help config.syntax`.
 
 Files
 =====
@@ -49,9 +62,9 @@
   - ``%USERPROFILE%\Mercurial.ini`` (per-user)
   - ``%HOME%\.hgrc`` (per-user)
   - ``%HOME%\Mercurial.ini`` (per-user)
-  - ``<install-dir>\Mercurial.ini`` (per-installation)
+  - ``HKEY_LOCAL_MACHINE\SOFTWARE\Mercurial`` (per-installation)
   - ``<install-dir>\hgrc.d\*.rc`` (per-installation)
-  - ``HKEY_LOCAL_MACHINE\SOFTWARE\Mercurial`` (per-installation)
+  - ``<install-dir>\Mercurial.ini`` (per-installation)
   - ``<internal>/default.d/*.rc`` (defaults)
 
   .. note::
@@ -76,8 +89,8 @@
 will not get transferred during a "clone" operation. Options in
 this file override options in all other configuration files. On
 Plan 9 and Unix, most of this file will be ignored if it doesn't
-belong to a trusted user or to a trusted group. See the documentation
-for the ``[trusted]`` section below for more details.
+belong to a trusted user or to a trusted group. See
+:hg:`help config.trusted` for more details.
 
 Per-user configuration file(s) are for the user running Mercurial. On
 Windows 9x, ``%HOME%`` is replaced by ``%APPDATA%``. Options in these
@@ -208,9 +221,10 @@
 ---------
 
 Defines command aliases.
+
 Aliases allow you to define your own commands in terms of other
 commands (or aliases), optionally including arguments. Positional
-arguments in the form of ``$1``, ``$2``, etc in the alias definition
+arguments in the form of ``$1``, ``$2``, etc. in the alias definition
 are expanded by Mercurial before execution. Positional arguments not
 already used by ``$N`` in the definition are put at the end of the
 command to be executed.
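
A minimal illustration (the alias name is arbitrary)::

  [alias]
  latest = log --limit $1

With this in place, ``hg latest 5`` runs ``hg log --limit 5``.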
@@ -273,8 +287,8 @@
 ------------
 
 Settings used when displaying file annotations. All values are
-Booleans and default to False. See ``diff`` section for related
-options for the diff command.
+Booleans and default to False. See :hg:`help config.diff` for
+related options for the diff command.
 
 ``ignorews``
     Ignore white space when comparing lines.
@@ -291,7 +305,7 @@
 
 Authentication credentials for HTTP authentication. This section
 allows you to store usernames and passwords for use when logging
-*into* HTTP servers. See the ``[web]`` configuration section if
+*into* HTTP servers. See :hg:`help config.web` if
 you want to configure *who* can login to your HTTP server.
 
 Each line has the following format::
@@ -347,7 +361,7 @@
     authentication entry with. Only used if the prefix doesn't include
     a scheme. Supported schemes are http and https. They will match
     static-http and static-https respectively, as well.
-    Default: https.
+    (default: https)
 
 If no suitable authentication entry is found, the user is prompted
 for credentials as usual if required by the remote.
@@ -356,8 +370,9 @@
 ``committemplate``
 ------------------
 
-``changeset`` configuration in this section is used as the template to
-customize the text shown in the editor when committing.
+``changeset``
+    String: configuration in this section is used as the template to
+    customize the text shown in the editor when committing.
 
 In addition to pre-defined template keywords, commit log specific one
 below can be used for customization:
@@ -390,10 +405,10 @@
    detail), this customization should be configured carefully, to
    avoid showing broken characters.
 
-   For example, if multibyte character ending with backslash (0x5c) is
-   followed by ASCII character 'n' in the customized template,
-   sequence of backslash and 'n' is treated as line-feed unexpectedly
-   (and multibyte character is broken, too).
+   For example, if a multibyte character ending with backslash (0x5c) is
+   followed by the ASCII character 'n' in the customized template,
+   the sequence of backslash and 'n' is treated as line-feed unexpectedly
+   (and the multibyte character is broken, too).
 
 Customized template is used for commands below (``--edit`` may be
 required):
@@ -447,9 +462,10 @@
 only for :hg:`tag --remove`, but ``changeset.tag`` customizes the
 commit message for :hg:`tag` regardless of ``--remove`` option.
 
-At the external editor invocation for committing, corresponding
-dot-separated list of names without ``changeset.`` prefix
-(e.g. ``commit.normal.normal``) is in ``HGEDITFORM`` environment variable.
+When the external editor is invoked for a commit, the corresponding
+dot-separated list of names without the ``changeset.`` prefix
+(e.g. ``commit.normal.normal``) is in the ``HGEDITFORM`` environment
+variable.
 
 In this section, items other than ``changeset`` can be referred from
 others. For example, the configuration to list committed files up
@@ -514,7 +530,7 @@
 ``defaults``
 ------------
 
-(defaults are deprecated. Don't use them. Use aliases instead)
+(defaults are deprecated. Don't use them. Use aliases instead.)
 
 Use the ``[defaults]`` section to define command defaults, i.e. the
 default options/arguments to pass to the specified commands.
@@ -535,8 +551,8 @@
 --------
 
 Settings used when displaying diffs. Everything except for ``unified``
-is a Boolean and defaults to False. See ``annotate`` section for
-related options for the annotate command.
+is a Boolean and defaults to False. See :hg:`help config.annotate`
+for related options for the annotate command.
 
 ``git``
     Use git extended diff format.
@@ -599,8 +615,8 @@
     containing patches of outgoing messages will be encoded in the
     first character set to which conversion from local encoding
     (``$HGENCODING``, ``ui.fallbackencoding``) succeeds. If correct
-    conversion fails, the text in question is sent as is. Defaults to
-    empty (explicit) list.
+    conversion fails, the text in question is sent as is.
+    (default: '')
 
     Order of outgoing email character sets:
 
@@ -711,8 +727,7 @@
 action. Overriding a site-wide hook can be done by changing its
 value or setting it to an empty string.  Hooks can be prioritized
 by adding a prefix of ``priority`` to the hook name on a new line
-and setting the priority.  The default priority is 0 if
-not specified.
+and setting the priority. The default priority is 0.
 
 Example ``.hg/hgrc``::
 
@@ -748,7 +763,7 @@
 ``outgoing``
   Run after sending changes from local repository to another. ID of
   first changeset sent is in ``$HG_NODE``. Source of operation is in
-  ``$HG_SOURCE``; see "preoutgoing" hook for description.
+  ``$HG_SOURCE``; also see the :hg:`help config.preoutgoing` hook.

 
 ``post-<command>``
   Run after successful invocations of the associated command. The
@@ -830,12 +845,12 @@
 ``txnclose``
   Run after any repository transaction has been committed. At this
   point, the transaction can no longer be rolled back. The hook will run
-  after the lock is released. See ``pretxnclose`` docs for details about
-  available variables.
+  after the lock is released. See :hg:`help config.pretxnclose` docs for
+  details about available variables.
 
 ``txnabort``
-  Run when a transaction is aborted. See ``pretxnclose`` docs for details about
-  available variables.
+  Run when a transaction is aborted. See :hg:`help config.pretxnclose`
+  docs for details about available variables.
 
 ``pretxnchangegroup``
   Run after a changegroup has been added via push, pull or unbundle,
@@ -954,7 +969,7 @@
 
 ``always``
     Optional. Always use the proxy, even for localhost and any entries
-    in ``http_proxy.no``. True or False. Default: False.
+    in ``http_proxy.no``. (default: False)
 
 ``merge-patterns``
 ------------------
@@ -989,7 +1004,10 @@
   kdiff3.priority = 1
 
   # Changing the priority of preconfigured tool
-  vimdiff.priority = 0
+  meld.priority = 0
+
+  # Disable a preconfigured tool
+  vimdiff.disabled = yes
 
   # Define new tool
   myHtmlTool.args = -m $local $other $base $output
@@ -1000,12 +1018,12 @@
 
 ``priority``
   The priority in which to evaluate this tool.
-  Default: 0.
+  (default: 0)
 
 ``executable``
   Either just the name of the executable or its pathname.  On Windows,
   the path can use environment variables with ${ProgramFiles} syntax.
-  Default: the tool name.
+  (default: the tool name)
 
 ``args``
   The arguments to pass to the tool executable. You can refer to the
@@ -1017,7 +1035,7 @@
   to or the commit you are merging with. During a rebase ``$local``
   represents the destination of the rebase, and ``$other`` represents the
   commit being rebased.
-  Default: ``$local $base $other``
+  (default: ``$local $base $other``)
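
  For instance, a hypothetical tool that writes its result to a separate
  output file could be configured as::

    [merge-tools]
    mytool.args = $base $local $other -o $output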
 
 ``premerge``
   Attempt to run internal non-interactive 3-way merge tool before
@@ -1026,15 +1044,14 @@
   premerge fails. The ``keep-merge3`` will do the same but include information
   about the base of the merge in the marker (see internal :merge3 in
   :hg:`help merge-tools`).
-  Default: True
+  (default: True)
 
 ``binary``
-  This tool can merge binary files. Defaults to False, unless tool
-  was selected by file pattern match.
+  This tool can merge binary files. (default: False, unless tool
+  was selected by file pattern match)
 
 ``symlink``
-  This tool can merge symlinks. Defaults to False, even if tool was
-  selected by file pattern match.
+  This tool can merge symlinks. (default: False)
 
 ``check``
   A list of merge success-checking options:
@@ -1048,32 +1065,32 @@
 
 ``fixeol``
   Attempt to fix up EOL changes caused by the merge tool.
-  Default: False
+  (default: False)
 
 ``gui``
-  This tool requires a graphical interface to run. Default: False
+  This tool requires a graphical interface to run. (default: False)
 
 ``regkey``
   Windows registry key which describes install location of this
   tool. Mercurial will search for this key first under
   ``HKEY_CURRENT_USER`` and then under ``HKEY_LOCAL_MACHINE``.
-  Default: None
+  (default: None)
 
 ``regkeyalt``
   An alternate Windows registry key to try if the first key is not
   found.  The alternate key uses the same ``regname`` and ``regappend``
   semantics of the primary key.  The most common use for this key
   is to search for 32bit applications on 64bit operating systems.
-  Default: None
+  (default: None)
 
 ``regname``
-  Name of value to read from specified registry key. Defaults to the
-  unnamed (default) value.
+  Name of value to read from specified registry key.
+  (default: the unnamed (default) value)
 
 ``regappend``
   String to append to the value read from the registry, typically
   the executable name of the tool.
-  Default: None
+  (default: None)
 
 
 ``patch``
@@ -1091,13 +1108,13 @@
     endings in patched files are normalized to their original setting
     on a per-file basis. If target file does not exist or has no end
     of line, patch line endings are preserved.
-    Default: strict.
+    (default: strict)
 
 ``fuzz``
     The number of lines of 'fuzz' to allow when applying patches. This
     controls how much context the patcher is allowed to ignore when
     trying to apply a patch.
-    Default: 2
+    (default: 2)
 
 ``paths``
 ---------
@@ -1109,8 +1126,7 @@
 
 ``default``
     Directory or URL to use when pulling if no source is specified.
-    Default is set to repository from which the current repository was
-    cloned.
+    (default: repository from which the current repository was cloned)
 
 ``default-push``
     Optional. Directory or URL to use when pushing if no destination
@@ -1137,11 +1153,11 @@
     Controls draft phase behavior when working as a server. When true,
     pushed changesets are set to public in both client and server and
     pulled or cloned changesets are set to public in the client.
-    Default: True
+    (default: True)
 
 ``new-commit``
     Phase of newly-created commits.
-    Default: draft
+    (default: draft)
 
 ``checksubrepos``
     Check the phase of the current revision of each subrepository. Allowed
@@ -1152,7 +1168,7 @@
     "secret" phase while the parent repo is in "draft" phase), the commit is
     either aborted (if checksubrepos is set to "abort") or the higher phase is
     used for the parent repository commit (if set to "follow").
-    Default: "follow"
+    (default: follow)
 
 
 ``profiling``
@@ -1169,7 +1185,7 @@
 
 ``type``
     The type of profiler to use.
-    Default: ls.
+    (default: ls)
 
     ``ls``
       Use Python's built-in instrumenting profiler. This profiler
@@ -1183,7 +1199,7 @@
 
 ``format``
     Profiling format.  Specific to the ``ls`` instrumenting profiler.
-    Default: text.
+    (default: text)
 
     ``text``
       Generate a profiling report. When saving to a file, it should be
@@ -1196,28 +1212,28 @@
 
 ``frequency``
     Sampling frequency.  Specific to the ``stat`` sampling profiler.
-    Default: 1000.
+    (default: 1000)
 
 ``output``
     File path where profiling data or report should be saved. If the
-    file exists, it is replaced. Default: None, data is printed on
-    stderr
+    file exists, it is replaced. (default: None, data is printed on
+    stderr)
 
 ``sort``
     Sort field.  Specific to the ``ls`` instrumenting profiler.
     One of ``callcount``, ``reccallcount``, ``totaltime`` and
     ``inlinetime``.
-    Default: inlinetime.
+    (default: inlinetime)
 
 ``limit``
     Number of lines to show. Specific to the ``ls`` instrumenting profiler.
-    Default: 30.
+    (default: 30)
 
 ``nested``
     Show at most this number of lines of drill-down info after each main entry.
     This can help explain the difference between Total and Inline.
     Specific to the ``ls`` instrumenting profiler.
-    Default: 5.
+    (default: 5)
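
For example, to profile a single run (the values below are illustrative)::

  [profiling]
  type = ls
  sort = totaltime
  limit = 20
  output = /tmp/hg-profile.txt

and then invoke any command with the global ``--profile`` flag, e.g.
``hg --profile status``.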
 
 ``progress``
 ------------
@@ -1249,16 +1265,16 @@
 
 ``width``
     If set, the maximum width of the progress information (that is, min(width,
-    term width) will be used)
+    term width) will be used).
 
 ``clear-complete``
-    clear the progress bar after it's done (default to True)
+    Clear the progress bar after it's done. (default: True)
 
 ``disable``
-    If true, don't show a progress bar
+    If true, don't show a progress bar.
 
 ``assume-tty``
-    If true, ALWAYS show a progress bar, unless disable is given
+    If true, ALWAYS show a progress bar, unless disable is given.
 
 ``revsetalias``
 ---------------
@@ -1280,20 +1296,20 @@
     about 6 Mbps), uncompressed streaming is slower, because of the
     extra data transfer overhead. This mode will also temporarily hold
     the write lock while determining what data to transfer.
-    Default is True.
+    (default: True)
 
 ``preferuncompressed``
     When set, clients will try to use the uncompressed streaming
-    protocol. Default is False.
+    protocol. (default: False)
 
 ``validate``
     Whether to validate the completeness of pushed changesets by
     checking that all new file revisions specified in manifests are
-    present. Default is False.
+    present. (default: False)
 
 ``maxhttpheaderlen``
     Instruct HTTP clients not to send request headers longer than this
-    many bytes. Default is 1024.
+    many bytes. (default: 1024)
 
 ``smtp``
 --------
@@ -1304,12 +1320,12 @@
     Host name of mail server, e.g. "mail.example.com".
 
 ``port``
-    Optional. Port to connect to on mail server. Default: 465 (if
-    ``tls`` is smtps) or 25 (otherwise).
+    Optional. Port to connect to on mail server. (default: 465 if
+    ``tls`` is smtps; 25 otherwise)
 
 ``tls``
     Optional. Method to enable TLS when connecting to mail server: starttls,
-    smtps or none. Default: none.
+    smtps or none. (default: none)
 
 ``verifycert``
     Optional. Verification for the certificate of mail server, when
@@ -1319,19 +1335,19 @@
     ``[web] cacerts`` also). For "strict", sending email is also
     aborted, if there is no configuration for mail server in
     ``[hostfingerprints]`` and ``[web] cacerts``.  --insecure for
-    :hg:`email` overwrites this as "loose". Default: "strict".
+    :hg:`email` overwrites this as "loose". (default: strict)
 
 ``username``
     Optional. User name for authenticating with the SMTP server.
-    Default: none.
+    (default: None)
 
 ``password``
     Optional. Password for authenticating with the SMTP server. If not
     specified, interactive sessions will prompt the user for a
-    password; non-interactive sessions will fail. Default: none.
+    password; non-interactive sessions will fail. (default: None)
 
 ``local_hostname``
-    Optional. It's the hostname that the sender can use to identify
+    Optional. The hostname that the sender can use to identify
     itself to the MTA.
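
An illustrative configuration (host, port and user name are placeholders)::

  [smtp]
  host = mail.example.com
  port = 587
  tls = starttls
  username = hguser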
 
 
@@ -1390,30 +1406,45 @@
     Whether to include the .hg_archival.txt file containing meta data
     (hashes for the repository base and for tip) in archives created
     by the :hg:`archive` command or downloaded via hgweb.
-    Default is True.
+    (default: True)
 
 ``askusername``
     Whether to prompt for a username when committing. If True, and
     neither ``$HGUSER`` nor ``$EMAIL`` has been specified, then the user will
     be prompted to enter a username. If no username is entered, the
     default ``USER@HOST`` is used instead.
-    Default is False.
+    (default: False)
+
+``clonebundlefallback``
+    Whether failure to apply an advertised "clone bundle" from a server
+    should result in fallback to a regular clone.
+
+    This is disabled by default because servers advertising "clone
+    bundles" often do so to reduce server load. If advertised bundles
+    start mass failing and clients automatically fall back to a regular
+    clone, this would add significant and unexpected load to the server
+    since the server is expecting clone operations to be offloaded to
+    pre-generated bundles. Failing fast (the default behavior) ensures
+    clients don't overwhelm the server when "clone bundle" application
+    fails.
+
+    (default: False)
 
 ``commitsubrepos``
     Whether to commit modified subrepositories when committing the
     parent repository. If False and one subrepository has uncommitted
     changes, abort the commit.
-    Default is False.
+    (default: False)
 
 ``debug``
-    Print debugging information. True or False. Default is False.
+    Print debugging information. (default: False)
 
 ``editor``
-    The editor to use during a commit. Default is ``$EDITOR`` or ``vi``.
+    The editor to use during a commit. (default: ``$EDITOR`` or ``vi``)
 
 ``fallbackencoding``
     Encoding to try if it's not possible to decode the changelog using
-    UTF-8. Default is ISO-8859-1.
+    UTF-8. (default: ISO-8859-1)
 
 ``ignore``
     A file to read per-user ignore patterns from. This file should be
@@ -1424,7 +1455,7 @@
     of the ignore file format, see the ``hgignore(5)`` man page.
 
 ``interactive``
-    Allow to prompt the user. True or False. Default is True.
+    Allow prompting the user. (default: True)
 
 ``logtemplate``
     Template string for commands that print changesets.
@@ -1439,14 +1470,16 @@
     style uses the ``mergemarkertemplate`` setting to style the labels.
     The ``basic`` style just uses 'local' and 'other' as the marker label.
     One of ``basic`` or ``detailed``.
-    Default is ``basic``.
+    (default: ``basic``)
 
 ``mergemarkertemplate``
     The template used to print the commit description next to each conflict
     marker during merge conflicts. See :hg:`help templates` for the template
     format.
+
     Defaults to showing the hash, tags, branches, bookmarks, author, and
     the first line of the commit description.
+
     If you use non-ASCII characters in names for tags, branches, bookmarks,
     authors, and/or commit descriptions, you must pay attention to encodings of
     managed files. At template expansion, non-ASCII characters use the encoding
@@ -1470,7 +1503,7 @@
 
 ``portablefilenames``
     Check for portable filenames. Can be ``warn``, ``ignore`` or ``abort``.
-    Default is ``warn``.
+    (default: ``warn``)
     If set to ``warn`` (or ``true``), a warning message is printed on POSIX
     platforms, if a file with a non-portable filename is added (e.g. a file
     with a name that can't be created on Windows because it contains reserved
@@ -1481,56 +1514,63 @@
     On Windows, this configuration option is ignored and the command aborted.
 
 ``quiet``
-    Reduce the amount of output printed. True or False. Default is False.
+    Reduce the amount of output printed. (default: False)
 
 ``remotecmd``
-    remote command to use for clone/push/pull operations. Default is ``hg``.
+    Remote command to use for clone/push/pull operations. (default: ``hg``)
 
 ``report_untrusted``
     Warn if a ``.hg/hgrc`` file is ignored due to not being owned by a
-    trusted user or group. True or False. Default is True.
+    trusted user or group. (default: True)
 
 ``slash``
     Display paths using a slash (``/``) as the path separator. This
     only makes a difference on systems where the default path
     separator is not the slash character (e.g. Windows uses the
     backslash character (``\``)).
-    Default is False.
+    (default: False)
 
 ``statuscopies``
     Display copies in the status command.
 
 ``ssh``
-    command to use for SSH connections. Default is ``ssh``.
+    Command to use for SSH connections. (default: ``ssh``)
 
 ``strict``
     Require exact command names, instead of allowing unambiguous
-    abbreviations. True or False. Default is False.
+    abbreviations. (default: False)
 
 ``style``
     Name of style to use for command output.
 
+``supportcontact``
+    A URL where users should report a Mercurial traceback. Use this if you are a
+    large organisation with its own Mercurial deployment process and crash
+    reports should be addressed to your internal support.
+
 ``timeout``
     The timeout used when a lock is held (in seconds), a negative value
-    means no timeout. Default is 600.
+    means no timeout. (default: 600)
 
 ``traceback``
     Mercurial always prints a traceback when an unknown exception
     occurs. Setting this to True will make Mercurial print a traceback
     on all exceptions, even those recognized by Mercurial (such as
-    IOError or MemoryError). Default is False.
+    IOError or MemoryError). (default: False)
 
 ``username``
     The committer of a changeset created when running "commit".
     Typically a person's name and email address, e.g. ``Fred Widget
-    <fred@example.com>``. Default is ``$EMAIL`` or ``username@hostname``. If
-    the username in hgrc is empty, it has to be specified manually or
-    in a different hgrc file (e.g. ``$HOME/.hgrc``, if the admin set
-    ``username =``  in the system hgrc). Environment variables in the
+    <fred@example.com>``. Environment variables in the
     username are expanded.
 
+    (default: ``$EMAIL`` or ``username@hostname``. If the username in
+    hgrc is empty, e.g. if the system admin set ``username =``  in the
+    system hgrc, it has to be specified manually or in a different
+    hgrc file)
+
 ``verbose``
-    Increase the amount of output printed. True or False. Default is False.
+    Increase the amount of output printed. (default: False)
 
 
 ``web``
@@ -1560,35 +1600,35 @@
 The full set of options is:
 
 ``accesslog``
-    Where to output the access log. Default is stdout.
+    Where to output the access log. (default: stdout)
 
 ``address``
-    Interface address to bind to. Default is all.
+    Interface address to bind to. (default: all)
 
 ``allow_archive``
     List of archive format (bz2, gz, zip) allowed for downloading.
-    Default is empty.
+    (default: empty)
 
 ``allowbz2``
     (DEPRECATED) Whether to allow .tar.bz2 downloading of repository
     revisions.
-    Default is False.
+    (default: False)
 
 ``allowgz``
     (DEPRECATED) Whether to allow .tar.gz downloading of repository
     revisions.
-    Default is False.
+    (default: False)
 
 ``allowpull``
-    Whether to allow pulling from the repository. Default is True.
+    Whether to allow pulling from the repository. (default: True)
 
 ``allow_push``
     Whether to allow pushing to the repository. If empty or not set,
-    push is not allowed. If the special value ``*``, any remote user can
-    push, including unauthenticated users. Otherwise, the remote user
-    must have been authenticated, and the authenticated user name must
-    be present in this list. The contents of the allow_push list are
-    examined after the deny_push list.
+    pushing is not allowed. If the special value ``*``, any remote
+    user can push, including unauthenticated users. Otherwise, the
+    remote user must have been authenticated, and the authenticated
+    user name must be present in this list. The contents of the
+    allow_push list are examined after the deny_push list.
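
    For example (the user names are placeholders)::

      [web]
      allow_push = alice, bob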
 
 ``allow_read``
     If the user has not already been denied repository access due to
@@ -1603,11 +1643,12 @@
 
 ``allowzip``
     (DEPRECATED) Whether to allow .zip downloading of repository
-    revisions. Default is False. This feature creates temporary files.
+    revisions. This feature creates temporary files.
+    (default: False)
 
 ``archivesubrepos``
-    Whether to recurse into subrepositories when archiving. Default is
-    False.
+    Whether to recurse into subrepositories when archiving.
+    (default: False)
 
 ``baseurl``
     Base URL to use when publishing URLs in other locations, so
@@ -1642,7 +1683,7 @@
         -----END CERTIFICATE-----
 
 ``cache``
-    Whether to support caching in hgweb. Defaults to True.
+    Whether to support caching in hgweb. (default: True)
 
 ``certificate``
     Certificate to use when running :hg:`serve`.
@@ -1654,17 +1695,18 @@
     the current path are grouped behind navigable directory entries that
     lead to the locations of these repositories. In effect, this setting
     collapses each collection of repositories found within a subdirectory
-    into a single entry for that subdirectory. Default is False.
+    into a single entry for that subdirectory. (default: False)
 
 ``comparisoncontext``
     Number of lines of context to show in side-by-side file comparison. If
-    negative or the value ``full``, whole files are shown. Default is 5.
+    negative or the value ``full``, whole files are shown. (default: 5)
+
     This setting can be overridden by a ``context`` request parameter to the
     ``comparison`` command, taking the same values.
 
 ``contact``
     Name or email address of the person in charge of the repository.
-    Defaults to ui.username or ``$EMAIL`` or "unknown" if unset or empty.
+    (default: ui.username or ``$EMAIL`` or "unknown" if unset or empty)
 
 ``deny_push``
     Whether to deny pushing to the repository. If empty or not set,
@@ -1695,28 +1737,28 @@
 
 ``description``
     Textual description of the repository's purpose or contents.
-    Default is "unknown".
+    (default: "unknown")
 
 ``encoding``
-    Character encoding name. Default is the current locale charset.
-    Example: "UTF-8"
+    Character encoding name. (default: the current locale charset)
+    Example: "UTF-8".
 
 ``errorlog``
-    Where to output the error log. Default is stderr.
+    Where to output the error log. (default: stderr)
 
 ``guessmime``
     Control MIME types for raw download of file content.
     Set to True to let hgweb guess the content type from the file
     extension. This will serve HTML files as ``text/html`` and might
     allow cross-site scripting attacks when serving untrusted
-    repositories. Default is False.
+    repositories. (default: False)
 
 ``hidden``
     Whether to hide the repository in the hgwebdir index.
-    Default is False.
+    (default: False)
 
 ``ipv6``
-    Whether to use IPv6. Default is False.
+    Whether to use IPv6. (default: False)
 
 ``logoimg``
     File name of the logo image that some templates display on each page.
@@ -1725,32 +1767,41 @@
     If unset, ``hglogo.png`` will be used.
 
 ``logourl``
-    Base URL to use for logos. If unset, ``http://mercurial.selenic.com/``
+    Base URL to use for logos. If unset, ``https://mercurial-scm.org/``
     will be used.
 
 ``maxchanges``
-    Maximum number of changes to list on the changelog. Default is 10.
+    Maximum number of changes to list on the changelog. (default: 10)
 
 ``maxfiles``
-    Maximum number of files to list per changeset. Default is 10.
+    Maximum number of files to list per changeset. (default: 10)
 
 ``maxshortchanges``
     Maximum number of changes to list on the shortlog, graph or filelog
-    pages. Default is 60.
+    pages. (default: 60)
 
 ``name``
-    Repository name to use in the web interface. Default is current
-    working directory.
+    Repository name to use in the web interface.
+    (default: current working directory)
 
 ``port``
-    Port to listen on. Default is 8000.
+    Port to listen on. (default: 8000)
 
 ``prefix``
-    Prefix path to serve from. Default is '' (server root).
+    Prefix path to serve from. (default: '' (server root))
 
 ``push_ssl``
     Whether to require that inbound pushes be transported over SSL to
-    prevent password sniffing. Default is True.
+    prevent password sniffing. (default: True)
+
+``refreshinterval``
+    How frequently directory listings re-scan the filesystem for new
+    repositories, in seconds. This is relevant when wildcards are used
+    to define paths. Depending on how much filesystem traversal is
+    required, refreshing may negatively impact performance.
+
+    Values less than or equal to 0 always refresh.
+    (default: 20)
 
 ``staticurl``
     Base URL to use for static files. If unset, static files (e.g. the
@@ -1760,12 +1811,12 @@
 
 ``stripes``
     How many lines a "zebra stripe" should span in multi-line output.
-    Default is 1; set to 0 to disable.
+    Set to 0 to disable. (default: 1)
 
 ``style``
     Which template map style to use. The available options are the names of
-    subdirectories in the HTML templates path. Default is ``paper``.
-    Example: ``monoblue``
+    subdirectories in the HTML templates path. (default: ``paper``)
+    Example: ``monoblue``.
 
 ``templates``
     Where to find the HTML templates. The default path to the HTML templates
@@ -1812,6 +1863,6 @@
 helps performance.
 
 ``numcpus``
-    Number of CPUs to use for parallel operations. Default is 4 or the
-    number of CPUs on the system, whichever is larger. A zero or
+    Number of CPUs to use for parallel operations. A zero or
     negative value is treated as ``use the default``.
+    (default: 4 or the number of CPUs on the system, whichever is larger)
--- a/mercurial/help/glossary.txt	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/help/glossary.txt	Tue Oct 20 15:59:10 2015 -0500
@@ -32,12 +32,12 @@
     explicitly with a named branch, but it can also be done locally,
     using bookmarks or clones and anonymous branches.
 
-    Example: "The experimental branch".
+    Example: "The experimental branch."
 
     (Verb) The action of creating a child changeset which results in
     its parent having more than one child.
 
-    Example: "I'm going to branch at X".
+    Example: "I'm going to branch at X."
 
 Branch, anonymous
     Every time a new child changeset is created from a parent that is not
@@ -135,7 +135,7 @@
     See 'Changeset, child'.
 
 Close changeset
-    See 'Head, closed branch'
+    See 'Head, closed branch'.
 
 Closed branch
     See 'Branch, closed'.
@@ -144,11 +144,11 @@
     (Noun) An entire or partial copy of a repository. The partial
     clone must be in the form of a revision and its ancestors.
 
-    Example: "Is your clone up to date?".
+    Example: "Is your clone up to date?"
 
     (Verb) The process of creating a clone, using :hg:`clone`.
 
-    Example: "I'm going to clone the repository".
+    Example: "I'm going to clone the repository."
 
 Closed branch head
     See 'Head, closed branch'.
@@ -398,13 +398,13 @@
 Update
     (Noun) Another synonym of changeset.
 
-    Example: "I've pushed an update".
+    Example: "I've pushed an update."
 
     (Verb) This term is usually used to describe updating the state of
     the working directory to that of a specific changeset. See
     :hg:`help update`.
 
-    Example: "You should update".
+    Example: "You should update."
 
 Working directory
     See 'Directory, working'.
--- a/mercurial/help/hg.1.txt	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/help/hg.1.txt	Tue Oct 20 15:59:10 2015 -0500
@@ -104,7 +104,7 @@
 
 Resources
 """""""""
-Main Web Site: http://mercurial.selenic.com/
+Main Web Site: https://mercurial-scm.org/
 
 Source code repository: http://selenic.com/hg
 
--- a/mercurial/help/hgweb.txt	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/help/hgweb.txt	Tue Oct 20 15:59:10 2015 -0500
@@ -74,7 +74,7 @@
 
 Many commands take a ``{revision}`` URL parameter. This defines the
 changeset to operate on. This is commonly specified as the short,
-12 digit hexidecimal abbreviation for the full 40 character unique
+12 digit hexadecimal abbreviation for the full 40 character unique
 revision identifier. However, any value described by
 :hg:`help revisions` typically works.
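
For instance (hypothetical host, repository name and hash)::

  https://hg.example.com/repo/rev/0123456789ab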
 
--- a/mercurial/help/scripting.txt	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/help/scripting.txt	Tue Oct 20 15:59:10 2015 -0500
@@ -50,15 +50,15 @@
     invoking ``hg`` processes.
 
 HGENCODING
-   If not set, the locale used by Mercurial will be detected from the
-   environment. If the determined locale does not support display of
-   certain characters, Mercurial may render these character sequences
-   incorrectly (often by using "?" as a placeholder for invalid
-   characters in the current locale).
+    If not set, the locale used by Mercurial will be detected from the
+    environment. If the determined locale does not support display of
+    certain characters, Mercurial may render these character sequences
+    incorrectly (often by using "?" as a placeholder for invalid
+    characters in the current locale).
 
-   Explicitly setting this environment variable is a good practice to
-   guarantee consistent results. "utf-8" is a good choice on UNIX-like
-   environments.
+    Explicitly setting this environment variable is a good practice to
+    guarantee consistent results. "utf-8" is a good choice on UNIX-like
+    environments.
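
    For example, an illustrative wrapper invocation pinning the encoding::

      HGENCODING=utf-8 hg log > log.txt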
 
 HGRCPATH
     If not set, Mercurial will inherit config options from config files
--- a/mercurial/help/templates.txt	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/help/templates.txt	Tue Oct 20 15:59:10 2015 -0500
@@ -69,6 +69,10 @@
 
    $ hg log -r 0 --template "{date(date, '%Y')}\n"
 
+- Display date in UTC::
+
+   $ hg log -r 0 --template "{localdate(date, 'UTC')|date}\n"
+
 - Output the description set to a fill-width of 30::
 
    $ hg log -r 0 --template "{fill(desc, 30)}"
@@ -98,10 +102,18 @@
 
    $ hg log --template "{bookmarks % '{bookmark}{ifeq(bookmark, active, '*')} '}\n"
 
+- Find the previous release candidate tag, the distance and changes since the tag::
+
+   $ hg log -r . --template "{latesttag('re:^.*-rc$') % '{tag}, {changes}, {distance}'}\n"
+
 - Mark the working copy parent with '@'::
 
    $ hg log --template "{ifcontains(rev, revset('.'), '@')}\n"
 
+- Show details of parent revisions::
+
+   $ hg log --template "{revset('parents(%d)', rev) % '{desc|firstline}\n'}"
+
 - Show only commit descriptions that start with "template"::
 
    $ hg log --template "{startswith('template', firstline(desc))}\n"
--- a/mercurial/hg.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/hg.py	Tue Oct 20 15:59:10 2015 -0500
@@ -6,17 +6,41 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from i18n import _
-from lock import release
-from node import nullid
+from __future__ import absolute_import
+
+import errno
+import os
+import shutil
+
+from .i18n import _
+from .node import nullid
 
-import localrepo, bundlerepo, unionrepo, httppeer, sshpeer, statichttprepo
-import bookmarks, lock, util, extensions, error, node, scmutil, phases, url
-import cmdutil, discovery, repoview, exchange
-import ui as uimod
-import merge as mergemod
-import verify as verifymod
-import errno, os, shutil
+from . import (
+    bookmarks,
+    bundlerepo,
+    cmdutil,
+    discovery,
+    error,
+    exchange,
+    extensions,
+    httppeer,
+    localrepo,
+    lock,
+    merge as mergemod,
+    node,
+    phases,
+    repoview,
+    scmutil,
+    sshpeer,
+    statichttprepo,
+    ui as uimod,
+    unionrepo,
+    url,
+    util,
+    verify as verifymod,
+)
+
+release = lock.release
 
 def _local(path):
     path = util.expandpath(util.urllocalpath(path))
@@ -41,7 +65,7 @@
 
     if not peer.capable('branchmap'):
         if branches:
-            raise util.Abort(_("remote branch lookup not supported"))
+            raise error.Abort(_("remote branch lookup not supported"))
         revs.append(hashbranch)
         return revs, revs[0]
     branchmap = peer.branchmap()
@@ -49,7 +73,7 @@
     def primary(branch):
         if branch == '.':
             if not lrepo:
-                raise util.Abort(_("dirstate branch not accessible"))
+                raise error.Abort(_("dirstate branch not accessible"))
             branch = lrepo.dirstate.branch()
         if branch in branchmap:
             revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
@@ -136,7 +160,7 @@
     peer = _peerorrepo(ui, path, create)
     repo = peer.local()
     if not repo:
-        raise util.Abort(_("repository '%s' is not local") %
+        raise error.Abort(_("repository '%s' is not local") %
                          (path or peer.url()))
     return repo.filtered('visible')
 
@@ -170,7 +194,7 @@
     '''create a shared repository'''
 
     if not islocal(source):
-        raise util.Abort(_('can only share local repositories'))
+        raise error.Abort(_('can only share local repositories'))
 
     if not dest:
         dest = defaultdest(source)
@@ -193,7 +217,7 @@
     destvfs = scmutil.vfs(os.path.join(destwvfs.base, '.hg'), realpath=True)
 
     if destvfs.lexists():
-        raise util.Abort(_('destination already exists'))
+        raise error.Abort(_('destination already exists'))
 
     if not destwvfs.isdir():
         destwvfs.mkdir()
@@ -296,7 +320,7 @@
     revs = None
     if rev:
         if not srcpeer.capable('lookup'):
-            raise util.Abort(_("src repository does not support "
+            raise error.Abort(_("src repository does not support "
                                "revision lookup and so doesn't "
                                "support clone by revision"))
         revs = [srcpeer.lookup(r) for r in rev]
@@ -392,14 +416,14 @@
     source = util.urllocalpath(source)
 
     if not dest:
-        raise util.Abort(_("empty destination path is not valid"))
+        raise error.Abort(_("empty destination path is not valid"))
 
     destvfs = scmutil.vfs(dest, expandpath=True)
     if destvfs.lexists():
         if not destvfs.isdir():
-            raise util.Abort(_("destination '%s' already exists") % dest)
+            raise error.Abort(_("destination '%s' already exists") % dest)
         elif destvfs.listdir():
-            raise util.Abort(_("destination '%s' is not empty") % dest)
+            raise error.Abort(_("destination '%s' is not empty") % dest)
 
     shareopts = shareopts or {}
     sharepool = shareopts.get('pool')
@@ -424,7 +448,7 @@
         elif sharenamemode == 'remote':
             sharepath = os.path.join(sharepool, util.sha1(source).hexdigest())
         else:
-            raise util.Abort('unknown share naming mode: %s' % sharenamemode)
+            raise error.Abort('unknown share naming mode: %s' % sharenamemode)
 
         if sharepath:
             return clonewithshare(ui, peeropts, sharepath, source, srcpeer,
@@ -470,7 +494,7 @@
             except OSError as inst:
                 if inst.errno == errno.EEXIST:
                     cleandir = None
-                    raise util.Abort(_("destination '%s' already exists")
+                    raise error.Abort(_("destination '%s' already exists")
                                      % dest)
                 raise
 
@@ -510,14 +534,14 @@
             except OSError as inst:
                 if inst.errno == errno.EEXIST:
                     cleandir = None
-                    raise util.Abort(_("destination '%s' already exists")
+                    raise error.Abort(_("destination '%s' already exists")
                                      % dest)
                 raise
 
             revs = None
             if rev:
                 if not srcpeer.capable('lookup'):
-                    raise util.Abort(_("src repository does not support "
+                    raise error.Abort(_("src repository does not support "
                                        "revision lookup and so doesn't "
                                        "support clone by revision"))
                 revs = [srcpeer.lookup(r) for r in rev]
@@ -533,7 +557,8 @@
                 exchange.push(srcrepo, destpeer, revs=revs,
                               bookmarks=srcrepo._bookmarks.keys())
             else:
-                raise util.Abort(_("clone from remote to remote not supported"))
+                raise error.Abort(_("clone from remote to remote not supported")
+                                 )
 
         cleandir = None
 
@@ -558,7 +583,11 @@
                     try:
                         uprev = destrepo.lookup(checkout)
                     except error.RepoLookupError:
-                        pass
+                        if update is not True:
+                            try:
+                                uprev = destrepo.lookup(update)
+                            except error.RepoLookupError:
+                                pass
                 if uprev is None:
                     try:
                         uprev = destrepo._bookmarks['@']
@@ -568,7 +597,7 @@
                             status = _("updating to bookmark @\n")
                         else:
                             status = (_("updating to bookmark @ on branch %s\n")
-                                       % bn)
+                                      % bn)
                     except KeyError:
                         try:
                             uprev = destrepo.branchtip('default')
@@ -737,10 +766,6 @@
     recurse()
     return 0 # exit code is zero since we found outgoing changes
 
-def revert(repo, node, choose):
-    """revert changes to revision in node without updating dirstate"""
-    return mergemod.update(repo, node, False, True, choose)[3] > 0
-
 def verify(repo):
     """verify the consistency of a repository"""
     ret = verifymod.verify(repo)
@@ -796,3 +821,77 @@
         dst.setconfig('web', 'cacerts', util.expandpath(v), 'copied')
 
     return dst
+
+# Files of interest
+# Used to check if the repository has changed by looking at the mtime and
+# size of these files.
+foi = [('spath', '00changelog.i'),
+       ('spath', 'phaseroots'), # ! phase can change content at the same size
+       ('spath', 'obsstore'),
+       ('path', 'bookmarks'), # ! bookmark can change content at the same size
+      ]
+
+class cachedlocalrepo(object):
+    """Holds a localrepository that can be cached and reused."""
+
+    def __init__(self, repo):
+        """Create a new cached repo from an existing repo.
+
+        We assume the passed in repo was recently created. If the
+        repo has changed between when it was created and when it was
+        turned into a cache, it may not refresh properly.
+        """
+        assert isinstance(repo, localrepo.localrepository)
+        self._repo = repo
+        self._state, self.mtime = self._repostate()
+
+    def fetch(self):
+        """Refresh (if necessary) and return a repository.
+
+        If the cached instance is out of date, it will be recreated
+        automatically and returned.
+
+        Returns a tuple of the repo and a boolean indicating whether a new
+        repo instance was created.
+        """
+        # We compare the mtimes and sizes of some well-known files to
+        # determine if the repo changed. This is not precise, as mtimes
+        # are susceptible to clock skew and imprecise filesystems and
+        # file content can change while maintaining the same size.
+
+        state, mtime = self._repostate()
+        if state == self._state:
+            return self._repo, False
+
+        self._repo = repository(self._repo.baseui, self._repo.url())
+        self._state = state
+        self.mtime = mtime
+
+        return self._repo, True
+
+    def _repostate(self):
+        state = []
+        maxmtime = -1
+        for attr, fname in foi:
+            prefix = getattr(self._repo, attr)
+            p = os.path.join(prefix, fname)
+            try:
+                st = os.stat(p)
+            except OSError:
+                st = os.stat(prefix)
+            state.append((st.st_mtime, st.st_size))
+            maxmtime = max(maxmtime, st.st_mtime)
+
+        return tuple(state), maxmtime
+
+    def copy(self):
+        """Obtain a copy of this class instance.
+
+        A new localrepository instance is obtained. The new instance should be
+        completely independent of the original.
+        """
+        repo = repository(self._repo.baseui, self._repo.origroot)
+        c = cachedlocalrepo(repo)
+        c._state = self._state
+        c.mtime = self.mtime
+        return c
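
A hedged sketch of how the new cachedlocalrepo above could be driven by a
long-running caller; the repository path is a placeholder, and fetch() hands
back the repo plus a flag telling whether it had to be recreated::

    from mercurial import hg, ui as uimod

    # Sketch only: cache a repository object and let fetch() recreate it
    # when the files-of-interest state (mtime/size) changes on disk.
    repo = hg.repository(uimod.ui(), '/path/to/repo')
    cached = hg.cachedlocalrepo(repo)

    r, created = cached.fetch()    # created stays False until 'foi' changes
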
--- a/mercurial/hgweb/common.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/hgweb/common.py	Tue Oct 20 15:59:10 2015 -0500
@@ -80,12 +80,9 @@
     def __init__(self, code, message=None, headers=[]):
         if message is None:
             message = _statusmessage(code)
-        Exception.__init__(self)
+        Exception.__init__(self, message)
         self.code = code
-        self.message = message
         self.headers = headers
-    def __str__(self):
-        return self.message
 
 class continuereader(object):
     def __init__(self, f, write):
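
The ErrorResponse change above relies on standard Exception behaviour: passing
the message to Exception.__init__ makes str() and %-formatting return it, so
the removed .message attribute and __str__ are no longer needed. A minimal
standalone sketch of that behaviour::

    class ErrorResponse(Exception):
        def __init__(self, code, message):
            Exception.__init__(self, message)
            self.code = code

    err = ErrorResponse(404, 'repository not found')
    assert str(err) == 'repository not found'
    assert '0\n%s\n' % err == '0\nrepository not found\n'
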
--- a/mercurial/hgweb/hgweb_mod.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/hgweb/hgweb_mod.py	Tue Oct 20 15:59:10 2015 -0500
@@ -6,11 +6,11 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import os, re
+import contextlib
+import os
 from mercurial import ui, hg, hook, error, encoding, templater, util, repoview
 from mercurial.templatefilters import websub
-from mercurial.i18n import _
-from common import get_stat, ErrorResponse, permhooks, caching
+from common import ErrorResponse, permhooks, caching
 from common import HTTP_OK, HTTP_NOT_MODIFIED, HTTP_BAD_REQUEST
 from common import HTTP_NOT_FOUND, HTTP_SERVER_ERROR
 from request import wsgirequest
@@ -26,15 +26,6 @@
     'pushkey': 'push',
 }
 
-## Files of interest
-# Used to check if the repository has changed looking at mtime and size of
-# theses files. This should probably be relocated a bit higher in core.
-foi = [('spath', '00changelog.i'),
-       ('spath', 'phaseroots'), # ! phase can change content at the same size
-       ('spath', 'obsstore'),
-       ('path', 'bookmarks'), # ! bookmark can change content at the same size
-      ]
-
 def makebreadcrumb(url, prefix=''):
     '''Return a 'URL breadcrumb' list
 
@@ -60,8 +51,145 @@
         urlel = os.path.dirname(urlel)
     return reversed(breadcrumb)
 
+class requestcontext(object):
+    """Holds state/context for an individual request.
+
+    Servers can be multi-threaded. Holding state on the WSGI application
+    is prone to race conditions. Instances of this class exist to hold
+    mutable and race-free state for requests.
+    """
+    def __init__(self, app, repo):
+        self.repo = repo
+        self.reponame = app.reponame
+
+        self.archives = ('zip', 'gz', 'bz2')
+
+        self.maxchanges = self.configint('web', 'maxchanges', 10)
+        self.stripecount = self.configint('web', 'stripes', 1)
+        self.maxshortchanges = self.configint('web', 'maxshortchanges', 60)
+        self.maxfiles = self.configint('web', 'maxfiles', 10)
+        self.allowpull = self.configbool('web', 'allowpull', True)
+
+        # we use untrusted=False to prevent a repo owner from using
+        # web.templates in .hg/hgrc to get access to any file readable
+        # by the user running the CGI script
+        self.templatepath = self.config('web', 'templates', untrusted=False)
+
+        # This object is more expensive to build than simple config values.
+        # It is shared across requests. The app will replace the object
+        # if it is updated. Since this is a reference and nothing should
+        # modify the underlying object, it should be constant for the lifetime
+        # of the request.
+        self.websubtable = app.websubtable
+
+    # Trust the settings from the .hg/hgrc files by default.
+    def config(self, section, name, default=None, untrusted=True):
+        return self.repo.ui.config(section, name, default,
+                                   untrusted=untrusted)
+
+    def configbool(self, section, name, default=False, untrusted=True):
+        return self.repo.ui.configbool(section, name, default,
+                                       untrusted=untrusted)
+
+    def configint(self, section, name, default=None, untrusted=True):
+        return self.repo.ui.configint(section, name, default,
+                                      untrusted=untrusted)
+
+    def configlist(self, section, name, default=None, untrusted=True):
+        return self.repo.ui.configlist(section, name, default,
+                                       untrusted=untrusted)
+
+    archivespecs = {
+        'bz2': ('application/x-bzip2', 'tbz2', '.tar.bz2', None),
+        'gz': ('application/x-gzip', 'tgz', '.tar.gz', None),
+        'zip': ('application/zip', 'zip', '.zip', None),
+    }
+
+    def archivelist(self, nodeid):
+        allowed = self.configlist('web', 'allow_archive')
+        for typ, spec in self.archivespecs.iteritems():
+            if typ in allowed or self.configbool('web', 'allow%s' % typ):
+                yield {'type': typ, 'extension': spec[2], 'node': nodeid}
+
+    def templater(self, req):
+        # determine scheme, port and server name
+        # this is needed to create absolute urls
+
+        proto = req.env.get('wsgi.url_scheme')
+        if proto == 'https':
+            proto = 'https'
+            default_port = '443'
+        else:
+            proto = 'http'
+            default_port = '80'
+
+        port = req.env['SERVER_PORT']
+        port = port != default_port and (':' + port) or ''
+        urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
+        logourl = self.config('web', 'logourl', 'https://mercurial-scm.org/')
+        logoimg = self.config('web', 'logoimg', 'hglogo.png')
+        staticurl = self.config('web', 'staticurl') or req.url + 'static/'
+        if not staticurl.endswith('/'):
+            staticurl += '/'
+
+        # some functions for the templater
+
+        def motd(**map):
+            yield self.config('web', 'motd', '')
+
+        # figure out which style to use
+
+        vars = {}
+        styles = (
+            req.form.get('style', [None])[0],
+            self.config('web', 'style'),
+            'paper',
+        )
+        style, mapfile = templater.stylemap(styles, self.templatepath)
+        if style == styles[0]:
+            vars['style'] = style
+
+        start = req.url[-1] == '?' and '&' or '?'
+        sessionvars = webutil.sessionvars(vars, start)
+
+        if not self.reponame:
+            self.reponame = (self.config('web', 'name')
+                             or req.env.get('REPO_NAME')
+                             or req.url.strip('/') or self.repo.root)
+
+        def websubfilter(text):
+            return websub(text, self.websubtable)
+
+        # create the templater
+
+        tmpl = templater.templater(mapfile,
+                                   filters={'websub': websubfilter},
+                                   defaults={'url': req.url,
+                                             'logourl': logourl,
+                                             'logoimg': logoimg,
+                                             'staticurl': staticurl,
+                                             'urlbase': urlbase,
+                                             'repo': self.reponame,
+                                             'encoding': encoding.encoding,
+                                             'motd': motd,
+                                             'sessionvars': sessionvars,
+                                             'pathdef': makebreadcrumb(req.url),
+                                             'style': style,
+                                            })
+        return tmpl
+
 
 class hgweb(object):
+    """HTTP server for individual repositories.
+
+    Instances of this class serve HTTP responses for a particular
+    repository.
+
+    Instances are typically used as WSGI applications.
+
+    Some servers are multi-threaded. On these servers, there may
+    be multiple active threads inside __call__.
+    """
     def __init__(self, repo, name=None, baseui=None):
         if isinstance(repo, str):
             if baseui:
@@ -73,93 +201,62 @@
             # we trust caller to give us a private copy
             r = repo
 
-        r = self._getview(r)
         r.ui.setconfig('ui', 'report_untrusted', 'off', 'hgweb')
         r.baseui.setconfig('ui', 'report_untrusted', 'off', 'hgweb')
         r.ui.setconfig('ui', 'nontty', 'true', 'hgweb')
         r.baseui.setconfig('ui', 'nontty', 'true', 'hgweb')
+        # resolve file patterns relative to repo root
+        r.ui.setconfig('ui', 'forcecwd', r.root, 'hgweb')
+        r.baseui.setconfig('ui', 'forcecwd', r.root, 'hgweb')
         # displaying bundling progress bar while serving feel wrong and may
         # break some wsgi implementation.
         r.ui.setconfig('progress', 'disable', 'true', 'hgweb')
         r.baseui.setconfig('progress', 'disable', 'true', 'hgweb')
-        self.repo = r
+        self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))]
+        self._lastrepo = self._repos[0]
         hook.redirect(True)
-        self.repostate = ((-1, -1), (-1, -1))
-        self.mtime = -1
         self.reponame = name
-        self.archives = 'zip', 'gz', 'bz2'
-        self.stripecount = 1
-        # we use untrusted=False to prevent a repo owner from using
-        # web.templates in .hg/hgrc to get access to any file readable
-        # by the user running the CGI script
-        self.templatepath = self.config('web', 'templates', untrusted=False)
-        self.websubtable = self.loadwebsub()
 
-    # The CGI scripts are often run by a user different from the repo owner.
-    # Trust the settings from the .hg/hgrc files by default.
-    def config(self, section, name, default=None, untrusted=True):
-        return self.repo.ui.config(section, name, default,
-                                   untrusted=untrusted)
-
-    def configbool(self, section, name, default=False, untrusted=True):
-        return self.repo.ui.configbool(section, name, default,
-                                       untrusted=untrusted)
+    def _webifyrepo(self, repo):
+        repo = getwebview(repo)
+        self.websubtable = webutil.getwebsubs(repo)
+        return repo
 
-    def configlist(self, section, name, default=None, untrusted=True):
-        return self.repo.ui.configlist(section, name, default,
-                                       untrusted=untrusted)
+    @contextlib.contextmanager
+    def _obtainrepo(self):
+        """Obtain a repo unique to the caller.
 
-    def _getview(self, repo):
-        """The 'web.view' config controls changeset filter to hgweb. Possible
-        values are ``served``, ``visible`` and ``all``. Default is ``served``.
-        The ``served`` filter only shows changesets that can be pulled from the
-        hgweb instance.  The``visible`` filter includes secret changesets but
-        still excludes "hidden" one.
-
-        See the repoview module for details.
+        Internally we maintain a stack of cachedlocalrepo instances
+        to be handed out. If one is available, we pop it and return it,
+        ensuring it is up to date in the process. If one is not available,
+        we clone the most recently used repo instance and return it.
 
-        The option has been around undocumented since Mercurial 2.5, but no
-        user ever asked about it. So we better keep it undocumented for now."""
-        viewconfig = repo.ui.config('web', 'view', 'served',
-                                    untrusted=True)
-        if viewconfig == 'all':
-            return repo.unfiltered()
-        elif viewconfig in repoview.filtertable:
-            return repo.filtered(viewconfig)
+        It is currently possible for the stack to grow without bounds
+        if the server allows infinite threads. However, servers should
+        have a thread limit, thus establishing our limit.
+        """
+        if self._repos:
+            cached = self._repos.pop()
+            r, created = cached.fetch()
         else:
-            return repo.filtered('served')
+            cached = self._lastrepo.copy()
+            r, created = cached.fetch()
+        if created:
+            r = self._webifyrepo(r)
 
-    def refresh(self, request=None):
-        repostate = []
-        mtime = 0
-        # file of interrests mtime and size
-        for meth, fname in foi:
-            prefix = getattr(self.repo, meth)
-            st = get_stat(prefix, fname)
-            repostate.append((st.st_mtime, st.st_size))
-            mtime = max(mtime, st.st_mtime)
-        repostate = tuple(repostate)
-        # we need to compare file size in addition to mtime to catch
-        # changes made less than a second ago
-        if repostate != self.repostate:
-            r = hg.repository(self.repo.baseui, self.repo.url())
-            self.repo = self._getview(r)
-            self.maxchanges = int(self.config("web", "maxchanges", 10))
-            self.stripecount = int(self.config("web", "stripes", 1))
-            self.maxshortchanges = int(self.config("web", "maxshortchanges",
-                                                   60))
-            self.maxfiles = int(self.config("web", "maxfiles", 10))
-            self.allowpull = self.configbool("web", "allowpull", True)
-            encoding.encoding = self.config("web", "encoding",
-                                            encoding.encoding)
-            # update these last to avoid threads seeing empty settings
-            self.repostate = repostate
-            # mtime is needed for ETag
-            self.mtime = mtime
-        if request:
-            self.repo.ui.environ = request.env
+        self._lastrepo = cached
+        self.mtime = cached.mtime
+        try:
+            yield r
+        finally:
+            self._repos.append(cached)
 
     def run(self):
+        """Start a server from CGI environment.
+
+        Modern servers should be using WSGI and should avoid this
+        method, if possible.
+        """
         if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
             raise RuntimeError("This function is only intended to be "
                                "called while running as a CGI script.")
@@ -167,12 +264,29 @@
         wsgicgi.launch(self)
 
     def __call__(self, env, respond):
+        """Run the WSGI application.
+
+        This may be called by multiple threads.
+        """
         req = wsgirequest(env, respond)
         return self.run_wsgi(req)
 
     def run_wsgi(self, req):
+        """Internal method to run the WSGI application.
 
-        self.refresh(req)
+        This is typically only called by Mercurial. External consumers
+        should be using instances of this class as the WSGI application.
+        """
+        with self._obtainrepo() as repo:
+            for r in self._runwsgi(req, repo):
+                yield r
+
+    def _runwsgi(self, req, repo):
+        rctx = requestcontext(self, repo)
+
+        # This state is global across all threads.
+        encoding.encoding = rctx.config('web', 'encoding', encoding.encoding)
+        rctx.repo.ui.environ = req.env
 
         # work with CGI variables to create coherent structure
         # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
@@ -203,8 +317,8 @@
                 if query:
                     raise ErrorResponse(HTTP_NOT_FOUND)
                 if cmd in perms:
-                    self.check_perm(req, perms[cmd])
-                return protocol.call(self.repo, req, cmd)
+                    self.check_perm(rctx, req, perms[cmd])
+                return protocol.call(rctx.repo, req, cmd)
             except ErrorResponse as inst:
                 # A client that sends unbundle without 100-continue will
                 # break if we respond early.
@@ -216,7 +330,7 @@
                 else:
                     req.headers.append(('Connection', 'Close'))
                 req.respond(inst, protocol.HGTYPE,
-                            body='0\n%s\n' % inst.message)
+                            body='0\n%s\n' % inst)
                 return ''
 
         # translate user-visible url structure to internal structure
@@ -249,7 +363,7 @@
 
             if cmd == 'archive':
                 fn = req.form['node'][0]
-                for type_, spec in self.archive_specs.iteritems():
+                for type_, spec in rctx.archivespecs.iteritems():
                     ext = spec[2]
                     if fn.endswith(ext):
                         req.form['node'] = [fn[:-len(ext)]]
@@ -258,28 +372,28 @@
         # process the web interface request
 
         try:
-            tmpl = self.templater(req)
+            tmpl = rctx.templater(req)
             ctype = tmpl('mimetype', encoding=encoding.encoding)
             ctype = templater.stringify(ctype)
 
             # check read permissions non-static content
             if cmd != 'static':
-                self.check_perm(req, None)
+                self.check_perm(rctx, req, None)
 
             if cmd == '':
                 req.form['cmd'] = [tmpl.cache['default']]
                 cmd = req.form['cmd'][0]
 
-            if self.configbool('web', 'cache', True):
+            if rctx.configbool('web', 'cache', True):
                 caching(self, req) # sets ETag header or raises NOT_MODIFIED
             if cmd not in webcommands.__all__:
                 msg = 'no such method: %s' % cmd
                 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
             elif cmd == 'file' and 'raw' in req.form.get('style', []):
-                self.ctype = ctype
-                content = webcommands.rawfile(self, req, tmpl)
+                rctx.ctype = ctype
+                content = webcommands.rawfile(rctx, req, tmpl)
             else:
-                content = getattr(webcommands, cmd)(self, req, tmpl)
+                content = getattr(webcommands, cmd)(rctx, req, tmpl)
                 req.respond(HTTP_OK, ctype)
 
             return content
@@ -299,129 +413,29 @@
             if inst.code == HTTP_NOT_MODIFIED:
                 # Not allowed to return a body on a 304
                 return ['']
-            return tmpl('error', error=inst.message)
-
-    def loadwebsub(self):
-        websubtable = []
-        websubdefs = self.repo.ui.configitems('websub')
-        # we must maintain interhg backwards compatibility
-        websubdefs += self.repo.ui.configitems('interhg')
-        for key, pattern in websubdefs:
-            # grab the delimiter from the character after the "s"
-            unesc = pattern[1]
-            delim = re.escape(unesc)
+            return tmpl('error', error=str(inst))
 
-            # identify portions of the pattern, taking care to avoid escaped
-            # delimiters. the replace format and flags are optional, but
-            # delimiters are required.
-            match = re.match(
-                r'^s%s(.+)(?:(?<=\\\\)|(?<!\\))%s(.*)%s([ilmsux])*$'
-                % (delim, delim, delim), pattern)
-            if not match:
-                self.repo.ui.warn(_("websub: invalid pattern for %s: %s\n")
-                                  % (key, pattern))
-                continue
-
-            # we need to unescape the delimiter for regexp and format
-            delim_re = re.compile(r'(?<!\\)\\%s' % delim)
-            regexp = delim_re.sub(unesc, match.group(1))
-            format = delim_re.sub(unesc, match.group(2))
+    def check_perm(self, rctx, req, op):
+        for permhook in permhooks:
+            permhook(rctx, req, op)
 
-            # the pattern allows for 6 regexp flags, so set them if necessary
-            flagin = match.group(3)
-            flags = 0
-            if flagin:
-                for flag in flagin.upper():
-                    flags |= re.__dict__[flag]
-
-            try:
-                regexp = re.compile(regexp, flags)
-                websubtable.append((regexp, format))
-            except re.error:
-                self.repo.ui.warn(_("websub: invalid regexp for %s: %s\n")
-                                  % (key, regexp))
-        return websubtable
-
-    def templater(self, req):
-
-        # determine scheme, port and server name
-        # this is needed to create absolute urls
-
-        proto = req.env.get('wsgi.url_scheme')
-        if proto == 'https':
-            proto = 'https'
-            default_port = "443"
-        else:
-            proto = 'http'
-            default_port = "80"
+def getwebview(repo):
+    """The 'web.view' config controls changeset filter to hgweb. Possible
+    values are ``served``, ``visible`` and ``all``. Default is ``served``.
+    The ``served`` filter only shows changesets that can be pulled from the
+    hgweb instance.  The``visible`` filter includes secret changesets but
+    still excludes "hidden" one.
 
-        port = req.env["SERVER_PORT"]
-        port = port != default_port and (":" + port) or ""
-        urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
-        logourl = self.config("web", "logourl", "http://mercurial.selenic.com/")
-        logoimg = self.config("web", "logoimg", "hglogo.png")
-        staticurl = self.config("web", "staticurl") or req.url + 'static/'
-        if not staticurl.endswith('/'):
-            staticurl += '/'
-
-        # some functions for the templater
-
-        def motd(**map):
-            yield self.config("web", "motd", "")
-
-        # figure out which style to use
-
-        vars = {}
-        styles = (
-            req.form.get('style', [None])[0],
-            self.config('web', 'style'),
-            'paper',
-        )
-        style, mapfile = templater.stylemap(styles, self.templatepath)
-        if style == styles[0]:
-            vars['style'] = style
-
-        start = req.url[-1] == '?' and '&' or '?'
-        sessionvars = webutil.sessionvars(vars, start)
-
-        if not self.reponame:
-            self.reponame = (self.config("web", "name")
-                             or req.env.get('REPO_NAME')
-                             or req.url.strip('/') or self.repo.root)
+    See the repoview module for details.
 
-        def websubfilter(text):
-            return websub(text, self.websubtable)
-
-        # create the templater
+    The option has been around undocumented since Mercurial 2.5, but no
+    user ever asked about it. So we better keep it undocumented for now."""
+    viewconfig = repo.ui.config('web', 'view', 'served',
+                                untrusted=True)
+    if viewconfig == 'all':
+        return repo.unfiltered()
+    elif viewconfig in repoview.filtertable:
+        return repo.filtered(viewconfig)
+    else:
+        return repo.filtered('served')
 
-        tmpl = templater.templater(mapfile,
-                                   filters={"websub": websubfilter},
-                                   defaults={"url": req.url,
-                                             "logourl": logourl,
-                                             "logoimg": logoimg,
-                                             "staticurl": staticurl,
-                                             "urlbase": urlbase,
-                                             "repo": self.reponame,
-                                             "encoding": encoding.encoding,
-                                             "motd": motd,
-                                             "sessionvars": sessionvars,
-                                             "pathdef": makebreadcrumb(req.url),
-                                             "style": style,
-                                            })
-        return tmpl
-
-    def archivelist(self, nodeid):
-        allowed = self.configlist("web", "allow_archive")
-        for i, spec in self.archive_specs.iteritems():
-            if i in allowed or self.configbool("web", "allow" + i):
-                yield {"type" : i, "extension" : spec[2], "node" : nodeid}
-
-    archive_specs = {
-        'bz2': ('application/x-bzip2', 'tbz2', '.tar.bz2', None),
-        'gz': ('application/x-gzip', 'tgz', '.tar.gz', None),
-        'zip': ('application/zip', 'zip', '.zip', None),
-        }
-
-    def check_perm(self, req, op):
-        for permhook in permhooks:
-            permhook(self, req, op)
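
A standalone, hedged sketch of the pop/fetch/append pattern that _obtainrepo
uses above, with a trivial stand-in instead of a real cachedlocalrepo::

    import contextlib

    class fakecached(object):
        """Stand-in for hg.cachedlocalrepo, for illustration only."""
        def fetch(self):
            return object(), False        # (repo, created)
        def copy(self):
            return fakecached()

    class repopool(object):
        def __init__(self):
            self._repos = [fakecached()]
            self._lastrepo = self._repos[0]

        @contextlib.contextmanager
        def obtain(self):
            # Pop a free cached entry, or clone the most recently used one;
            # either way the entry goes back on the stack when we are done.
            if self._repos:
                cached = self._repos.pop()
            else:
                cached = self._lastrepo.copy()
            r, created = cached.fetch()
            self._lastrepo = cached
            try:
                yield r
            finally:
                self._repos.append(cached)
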
--- a/mercurial/hgweb/hgwebdir_mod.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/hgweb/hgwebdir_mod.py	Tue Oct 20 15:59:10 2015 -0500
@@ -79,17 +79,30 @@
     return name, str(port), path
 
 class hgwebdir(object):
-    refreshinterval = 20
+    """HTTP server for multiple repositories.
 
+    Given a configuration, different repositories will be served depending
+    on the request path.
+
+    Instances are typically used as WSGI applications.
+    """
     def __init__(self, conf, baseui=None):
         self.conf = conf
         self.baseui = baseui
+        self.ui = None
         self.lastrefresh = 0
         self.motd = None
         self.refresh()
 
     def refresh(self):
-        if self.lastrefresh + self.refreshinterval > time.time():
+        refreshinterval = 20
+        if self.ui:
+            refreshinterval = self.ui.configint('web', 'refreshinterval',
+                                                refreshinterval)
+
+        # refreshinterval <= 0 means to always refresh.
+        if (refreshinterval > 0 and
+            self.lastrefresh + refreshinterval > time.time()):
             return
 
         if self.baseui:
@@ -105,7 +118,7 @@
         if not isinstance(self.conf, (dict, list, tuple)):
             map = {'paths': 'hgweb-paths'}
             if not os.path.exists(self.conf):
-                raise util.Abort(_('config file %s not found!') % self.conf)
+                raise error.Abort(_('config file %s not found!') % self.conf)
             u.readconfig(self.conf, remap=map, trust=True)
             paths = []
             for name, ignored in u.configitems('hgweb-paths'):
@@ -446,7 +459,7 @@
 
         start = url[-1] == '?' and '&' or '?'
         sessionvars = webutil.sessionvars(vars, start)
-        logourl = config('web', 'logourl', 'http://mercurial.selenic.com/')
+        logourl = config('web', 'logourl', 'https://mercurial-scm.org/')
         logoimg = config('web', 'logoimg', 'hglogo.png')
         staticurl = config('web', 'staticurl') or url + 'static/'
         if not staticurl.endswith('/'):
--- a/mercurial/hgweb/request.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/hgweb/request.py	Tue Oct 20 15:59:10 2015 -0500
@@ -40,6 +40,12 @@
     return form
 
 class wsgirequest(object):
+    """Higher-level API for a WSGI request.
+
+    WSGI applications are invoked with 2 arguments. They are used to
+    instantiate instances of this class, which provides higher-level APIs
+    for obtaining request parameters, writing HTTP output, etc.
+    """
     def __init__(self, wsgienv, start_response):
         version = wsgienv['wsgi.version']
         if (version < (1, 0)) or (version >= (2, 0)):
@@ -94,7 +100,7 @@
                     self.headers = [(k, v) for (k, v) in self.headers if
                                     k in ('Date', 'ETag', 'Expires',
                                           'Cache-Control', 'Vary')]
-                status = statusmessage(status.code, status.message)
+                status = statusmessage(status.code, str(status))
             elif status == 200:
                 status = '200 Script output follows'
             elif isinstance(status, int):
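
Because wsgirequest is built from the two standard WSGI arguments, an hgweb
instance can be mounted under any WSGI server. A minimal, hedged example using
the stdlib reference server (the repository path is a placeholder)::

    from wsgiref.simple_server import make_server

    from mercurial.hgweb import hgweb

    # Sketch only: serve a single repository through the stdlib WSGI server.
    application = hgweb('/path/to/repo')
    make_server('localhost', 8000, application).serve_forever()
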
--- a/mercurial/hgweb/server.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/hgweb/server.py	Tue Oct 20 15:59:10 2015 -0500
@@ -208,7 +208,7 @@
             import OpenSSL
             OpenSSL.SSL.Context
         except ImportError:
-            raise util.Abort(_("SSL support is unavailable"))
+            raise error.Abort(_("SSL support is unavailable"))
         ctx = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD)
         ctx.use_privatekey_file(ssl_cert)
         ctx.use_certificate_file(ssl_cert)
@@ -239,7 +239,7 @@
             pass
 
 class _httprequesthandlerssl(_httprequesthandler):
-    """HTTPS handler based on Pythons ssl module (introduced in 2.6)"""
+    """HTTPS handler based on Python's ssl module"""
 
     url_scheme = 'https'
 
@@ -249,7 +249,7 @@
             import ssl
             ssl.wrap_socket
         except ImportError:
-            raise util.Abort(_("SSL support is unavailable"))
+            raise error.Abort(_("SSL support is unavailable"))
         httpserver.socket = ssl.wrap_socket(
             httpserver.socket, server_side=True,
             certfile=ssl_cert, ssl_version=ssl.PROTOCOL_TLSv1)
@@ -345,5 +345,5 @@
     try:
         return cls(ui, app, (address, port), handler)
     except socket.error as inst:
-        raise util.Abort(_("cannot start server at '%s:%d': %s")
+        raise error.Abort(_("cannot start server at '%s:%d': %s")
                          % (address, port, inst.args[1]))
--- a/mercurial/hgweb/webcommands.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/hgweb/webcommands.py	Tue Oct 20 15:59:10 2015 -0500
@@ -639,35 +639,10 @@
 
     The ``branches`` template is rendered.
     """
-    tips = []
-    heads = web.repo.heads()
-    parity = paritygen(web.stripecount)
-    sortkey = lambda item: (not item[1], item[0].rev())
-
-    def entries(limit, **map):
-        count = 0
-        if not tips:
-            for tag, hs, tip, closed in web.repo.branchmap().iterbranches():
-                tips.append((web.repo[tip], closed))
-        for ctx, closed in sorted(tips, key=sortkey, reverse=True):
-            if limit > 0 and count >= limit:
-                return
-            count += 1
-            if closed:
-                status = 'closed'
-            elif ctx.node() not in heads:
-                status = 'inactive'
-            else:
-                status = 'open'
-            yield {'parity': parity.next(),
-                   'branch': ctx.branch(),
-                   'status': status,
-                   'node': ctx.hex(),
-                   'date': ctx.date()}
-
+    entries = webutil.branchentries(web.repo, web.stripecount)
+    latestentry = webutil.branchentries(web.repo, web.stripecount, 1)
     return tmpl('branches', node=hex(web.repo.changelog.tip()),
-                entries=lambda **x: entries(0, **x),
-                latestentry=lambda **x: entries(1, **x))
+                entries=entries, latestentry=latestentry)
 
 @webcommand('summary')
 def summary(web, req, tmpl):
@@ -710,18 +685,6 @@
                    'date': web.repo[n].date(),
                    'node': hex(n)}
 
-    def branches(**map):
-        parity = paritygen(web.stripecount)
-
-        b = web.repo.branchmap()
-        l = [(-web.repo.changelog.rev(tip), tip, tag)
-             for tag, heads, tip, closed in b.iterbranches()]
-        for r, n, t in sorted(l):
-            yield {'parity': parity.next(),
-                   'branch': t,
-                   'node': hex(n),
-                   'date': web.repo[n].date()}
-
     def changelist(**map):
         parity = paritygen(web.stripecount, offset=start - end)
         l = [] # build a list in forward order for efficiency
@@ -761,7 +724,7 @@
                 lastchange=tip.date(),
                 tags=tagentries,
                 bookmarks=bookmarks,
-                branches=branches,
+                branches=webutil.branchentries(web.repo, web.stripecount, 10),
                 shortlog=changelist,
                 node=tip.hex(),
                 symrev='tip',
@@ -777,7 +740,7 @@
 
     The ``filediff`` template is rendered.
 
-    This hander is registered under both the ``/diff`` and ``/filediff``
+    This handler is registered under both the ``/diff`` and ``/filediff``
     paths. ``/diff`` is used in modern code.
     """
     fctx, ctx = None, None
@@ -1115,7 +1078,7 @@
                 raise ErrorResponse(HTTP_NOT_FOUND,
                     'file(s) not found: %s' % file[0])
 
-    mimetype, artype, extension, encoding = web.archive_specs[type_]
+    mimetype, artype, extension, encoding = web.archivespecs[type_]
     headers = [
         ('Content-Disposition', 'attachment; filename=%s%s' % (name, extension))
         ]
--- a/mercurial/hgweb/webutil.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/hgweb/webutil.py	Tue Oct 20 15:59:10 2015 -0500
@@ -7,6 +7,7 @@
 # GNU General Public License version 2 or any later version.
 
 import os, copy
+import re
 from mercurial import match, patch, error, ui, util, pathutil, context
 from mercurial.i18n import _
 from mercurial.node import hex, nullid, short
@@ -199,11 +200,42 @@
     for t in repo.nodebookmarks(node):
         yield tmpl(t1, bookmark=t, **args)
 
+def branchentries(repo, stripecount, limit=0):
+    tips = []
+    heads = repo.heads()
+    parity = paritygen(stripecount)
+    sortkey = lambda item: (not item[1], item[0].rev())
+
+    def entries(**map):
+        count = 0
+        if not tips:
+            for tag, hs, tip, closed in repo.branchmap().iterbranches():
+                tips.append((repo[tip], closed))
+        for ctx, closed in sorted(tips, key=sortkey, reverse=True):
+            if limit > 0 and count >= limit:
+                return
+            count += 1
+            if closed:
+                status = 'closed'
+            elif ctx.node() not in heads:
+                status = 'inactive'
+            else:
+                status = 'open'
+            yield {
+                'parity': parity.next(),
+                'branch': ctx.branch(),
+                'status': status,
+                'node': ctx.hex(),
+                'date': ctx.date()
+            }
+
+    return entries
+
 def cleanpath(repo, path):
     path = path.lstrip('/')
     return pathutil.canonpath(repo.root, '', path)
 
-def changeidctx (repo, changeid):
+def changeidctx(repo, changeid):
     try:
         ctx = repo[changeid]
     except error.RepoError:
@@ -212,11 +244,11 @@
 
     return ctx
 
-def changectx (repo, req):
+def changectx(repo, req):
     changeid = "tip"
     if 'node' in req.form:
         changeid = req.form['node'][0]
-        ipos=changeid.find(':')
+        ipos = changeid.find(':')
         if ipos != -1:
             changeid = changeid[(ipos + 1):]
     elif 'manifest' in req.form:
@@ -227,7 +259,7 @@
 def basechangectx(repo, req):
     if 'node' in req.form:
         changeid = req.form['node'][0]
-        ipos=changeid.find(':')
+        ipos = changeid.find(':')
         if ipos != -1:
             changeid = changeid[:ipos]
             return changeidctx(repo, changeid)
@@ -509,3 +541,44 @@
     # default termwidth breaks under mod_wsgi
     def termwidth(self):
         return 80
+
+def getwebsubs(repo):
+    websubtable = []
+    websubdefs = repo.ui.configitems('websub')
+    # we must maintain interhg backwards compatibility
+    websubdefs += repo.ui.configitems('interhg')
+    for key, pattern in websubdefs:
+        # grab the delimiter from the character after the "s"
+        unesc = pattern[1]
+        delim = re.escape(unesc)
+
+        # identify portions of the pattern, taking care to avoid escaped
+        # delimiters. the replace format and flags are optional, but
+        # delimiters are required.
+        match = re.match(
+            r'^s%s(.+)(?:(?<=\\\\)|(?<!\\))%s(.*)%s([ilmsux])*$'
+            % (delim, delim, delim), pattern)
+        if not match:
+            repo.ui.warn(_("websub: invalid pattern for %s: %s\n")
+                              % (key, pattern))
+            continue
+
+        # we need to unescape the delimiter for regexp and format
+        delim_re = re.compile(r'(?<!\\)\\%s' % delim)
+        regexp = delim_re.sub(unesc, match.group(1))
+        format = delim_re.sub(unesc, match.group(2))
+
+        # the pattern allows for 6 regexp flags, so set them if necessary
+        flagin = match.group(3)
+        flags = 0
+        if flagin:
+            for flag in flagin.upper():
+                flags |= re.__dict__[flag]
+
+        try:
+            regexp = re.compile(regexp, flags)
+            websubtable.append((regexp, format))
+        except re.error:
+            repo.ui.warn(_("websub: invalid regexp for %s: %s\n")
+                         % (key, regexp))
+    return websubtable
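
For context on what getwebsubs() parses: each [websub] (or legacy [interhg])
entry is an s/regexp/format/flags style rule whose compiled pieces end up
driving an ordinary regular-expression substitution. A hedged, self-contained
illustration (the rule and URL are invented)::

    import re

    # Roughly what a rule like
    #   issues = s|issue(\d+)|<a href="https://bugs.example.org/\1">issue\1</a>|i
    # decomposes into after parsing:
    regexp = re.compile(r'issue(\d+)', re.IGNORECASE)
    format = r'<a href="https://bugs.example.org/\1">issue\1</a>'

    print(regexp.sub(format, 'see issue42'))
    # see <a href="https://bugs.example.org/42">issue42</a>
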
--- a/mercurial/hook.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/hook.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,9 +5,19 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from i18n import _
-import os, sys, time
-import extensions, util, demandimport, error
+from __future__ import absolute_import
+
+import os
+import sys
+import time
+
+from .i18n import _
+from . import (
+    demandimport,
+    error,
+    extensions,
+    util,
+)
 
 def _pythonhook(ui, repo, name, hname, funcname, args, throw):
     '''call python hook. hook is callable object, looked up as
@@ -25,8 +35,9 @@
     else:
         d = funcname.rfind('.')
         if d == -1:
-            raise util.Abort(_('%s hook is invalid ("%s" not in '
-                               'a module)') % (hname, funcname))
+            raise error.HookLoadError(
+                _('%s hook is invalid ("%s" not in a module)')
+                % (hname, funcname))
         modname = funcname[:d]
         oldpaths = sys.path
         if util.mainfrozen():
@@ -53,21 +64,21 @@
                         ui.warn(_('exception from second failed import '
                                   'attempt:\n'))
                     ui.traceback(e2)
-                    raise util.Abort(_('%s hook is invalid '
-                                       '(import of "%s" failed)') %
-                                     (hname, modname))
+                    raise error.HookLoadError(
+                        _('%s hook is invalid (import of "%s" failed)') %
+                        (hname, modname))
         sys.path = oldpaths
         try:
             for p in funcname.split('.')[1:]:
                 obj = getattr(obj, p)
         except AttributeError:
-            raise util.Abort(_('%s hook is invalid '
-                               '("%s" is not defined)') %
-                             (hname, funcname))
+            raise error.HookLoadError(
+                _('%s hook is invalid ("%s" is not defined)')
+                % (hname, funcname))
         if not callable(obj):
-            raise util.Abort(_('%s hook is invalid '
-                               '("%s" is not callable)') %
-                             (hname, funcname))
+            raise error.HookLoadError(
+                _('%s hook is invalid ("%s" is not callable)')
+                % (hname, funcname))
 
     ui.note(_("calling hook %s: %s\n") % (hname, funcname))
     starttime = time.time()
@@ -81,7 +92,7 @@
 
         r = obj(ui=ui, repo=repo, hooktype=name, **args)
     except Exception as exc:
-        if isinstance(exc, util.Abort):
+        if isinstance(exc, error.Abort):
             ui.warn(_('error: %s hook failed: %s\n') %
                          (hname, exc.args[0]))
         else:
@@ -90,7 +101,7 @@
         if throw:
             raise
         ui.traceback()
-        return True
+        return True, True
     finally:
         sys.stdout, sys.stderr, sys.stdin = old
         duration = time.time() - starttime
@@ -100,13 +111,20 @@
         if throw:
             raise error.HookAbort(_('%s hook failed') % hname)
         ui.warn(_('warning: %s hook failed\n') % hname)
-    return r
+    return r, False
 
 def _exthook(ui, repo, name, cmd, args, throw):
     ui.note(_("running hook %s: %s\n") % (name, cmd))
 
     starttime = time.time()
     env = {}
+
+    # make in-memory changes visible to external process
+    tr = repo.currenttransaction()
+    repo.dirstate.write(tr)
+    if tr and tr.writepending():
+        env['HG_PENDING'] = repo.root
+
     for k, v in args.iteritems():
         if callable(v):
             v = v()
@@ -151,14 +169,23 @@
     if not ui.callhooks:
         return False
 
+    hooks = []
+    for hname, cmd in _allhooks(ui):
+        if hname.split('.')[0] == name and cmd:
+            hooks.append((hname, cmd))
+
+    res = runhooks(ui, repo, name, hooks, throw=throw, **args)
     r = False
+    for hname, cmd in hooks:
+        r = res[hname][0] or r
+    return r
+
+def runhooks(ui, repo, name, hooks, throw=False, **args):
+    res = {}
     oldstdout = -1
 
     try:
-        for hname, cmd in _allhooks(ui):
-            if hname.split('.')[0] != name or not cmd:
-                continue
-
+        for hname, cmd in hooks:
             if oldstdout == -1 and _redirect:
                 try:
                     stdoutno = sys.__stdout__.fileno()
@@ -173,7 +200,7 @@
                     pass
 
             if callable(cmd):
-                r = _pythonhook(ui, repo, name, hname, cmd, args, throw) or r
+                r, raised = _pythonhook(ui, repo, name, hname, cmd, args, throw)
             elif cmd.startswith('python:'):
                 if cmd.count(':') >= 2:
                     path, cmd = cmd[7:].rsplit(':', 1)
@@ -188,9 +215,13 @@
                     hookfn = getattr(mod, cmd)
                 else:
                     hookfn = cmd[7:].strip()
-                r = _pythonhook(ui, repo, name, hname, hookfn, args, throw) or r
+                r, raised = _pythonhook(ui, repo, name, hname, hookfn, args,
+                                        throw)
             else:
-                r = _exthook(ui, repo, hname, cmd, args, throw) or r
+                r = _exthook(ui, repo, hname, cmd, args, throw)
+                raised = False
+
+            res[hname] = r, raised
 
             # The stderr is fully buffered on Windows when connected to a pipe.
             # A forcible flush is required to make small stderr data in the
@@ -201,4 +232,4 @@
             os.dup2(oldstdout, stdoutno)
             os.close(oldstdout)
 
-    return r
+    return res
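
A short sketch of how the aggregate result in hook() above is folded out of
the per-hook dictionary that runhooks() now returns; the hook names and values
here are made up::

    # runhooks() maps each hook name to a (result, raised) pair.
    res = {'pretxncommit.lint': (False, False),
           'pretxncommit.check': (True, False)}

    failed = False
    for hname, (r, raised) in res.items():
        failed = r or failed

    assert failed    # a truthy result from any hook marks the run as failed
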
--- a/mercurial/httpconnection.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/httpconnection.py	Tue Oct 20 15:59:10 2015 -0500
@@ -107,7 +107,9 @@
 
 class HTTPConnection(httpclient.HTTPConnection):
     response_class = HTTPResponse
-    def request(self, method, uri, body=None, headers={}):
+    def request(self, method, uri, body=None, headers=None):
+        if headers is None:
+            headers = {}
         if isinstance(body, httpsendfile):
             body.seek(0)
         httpclient.HTTPConnection.request(self, method, uri, body=body,
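
The headers=None change above sidesteps Python's mutable-default-argument
pitfall: a default {} is created once and shared by every call. A tiny
standalone demonstration::

    def bad(headers={}):
        headers['x'] = 'set'        # mutates the single shared default dict
        return headers

    def good(headers=None):
        if headers is None:
            headers = {}            # fresh dict on every call
        headers['x'] = 'set'
        return headers

    assert bad() is bad()           # same object leaks state between calls
    assert good() is not good()
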
--- a/mercurial/httppeer.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/httppeer.py	Tue Oct 20 15:59:10 2015 -0500
@@ -6,12 +6,28 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from node import nullid
-from i18n import _
+from __future__ import absolute_import
+
+import errno
+import httplib
+import os
+import socket
 import tempfile
-import changegroup, statichttprepo, error, httpconnection, url, util, wireproto
-import os, urllib, urllib2, zlib, httplib
-import errno, socket
+import urllib
+import urllib2
+import zlib
+
+from .i18n import _
+from .node import nullid
+from . import (
+    changegroup,
+    error,
+    httpconnection,
+    statichttprepo,
+    url,
+    util,
+    wireproto,
+)
 
 def zgenerator(f):
     zd = zlib.decompressobj()
@@ -33,7 +49,7 @@
         self.requestbuilder = None
         u = util.url(path)
         if u.query or u.fragment:
-            raise util.Abort(_('unsupported URL component: "%s"') %
+            raise error.Abort(_('unsupported URL component: "%s"') %
                              (u.query or u.fragment))
 
         # urllib cannot handle URLs with embedded user or passwd
@@ -70,7 +86,7 @@
         return self.caps
 
     def lock(self):
-        raise util.Abort(_('operation not supported over http'))
+        raise error.Abort(_('operation not supported over http'))
 
     def _callstream(self, cmd, **args):
         if cmd == 'pushkey':
@@ -121,7 +137,7 @@
             resp = self.urlopener.open(req)
         except urllib2.HTTPError as inst:
             if inst.code == 401:
-                raise util.Abort(_('authorization failed'))
+                raise error.Abort(_('authorization failed'))
             raise
         except httplib.HTTPException as inst:
             self.ui.debug('http error while sending %s command\n' % cmd)
@@ -129,7 +145,7 @@
             raise IOError(None, inst)
         except IndexError:
             # this only happens with Python 2.3, later versions raise URLError
-            raise util.Abort(_('http error, possibly caused by proxy setting'))
+            raise error.Abort(_('http error, possibly caused by proxy setting'))
         # record the url we got redirected to
         resp_url = resp.geturl()
         if resp_url.endswith(qs):
@@ -207,8 +223,8 @@
             return vals
         except socket.error as err:
             if err.args[0] in (errno.ECONNRESET, errno.EPIPE):
-                raise util.Abort(_('push failed: %s') % err.args[1])
-            raise util.Abort(err.args[1])
+                raise error.Abort(_('push failed: %s') % err.args[1])
+            raise error.Abort(err.args[1])
         finally:
             fp.close()
             os.unlink(tempname)
@@ -247,13 +263,13 @@
 class httpspeer(httppeer):
     def __init__(self, ui, path):
         if not url.has_https:
-            raise util.Abort(_('Python support for SSL and HTTPS '
+            raise error.Abort(_('Python support for SSL and HTTPS '
                                'is not installed'))
         httppeer.__init__(self, ui, path)
 
 def instance(ui, path, create):
     if create:
-        raise util.Abort(_('cannot create new http repository'))
+        raise error.Abort(_('cannot create new http repository'))
     try:
         if path.startswith('https:'):
             inst = httpspeer(ui, path)
--- a/mercurial/i18n.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/i18n.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,8 +5,14 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import encoding
-import gettext as gettextmod, sys, os, locale
+from __future__ import absolute_import
+
+import gettext as gettextmod
+import locale
+import os
+import sys
+
+from . import encoding
 
 # modelled after templater.templatepath:
 if getattr(sys, 'frozen', None) is not None:
--- a/mercurial/localrepo.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/localrepo.py	Tue Oct 20 15:59:10 2015 -0500
@@ -11,7 +11,7 @@
 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
 import lock as lockmod
 import transaction, store, encoding, exchange, bundle2
-import scmutil, util, extensions, hook, error, revset
+import scmutil, util, extensions, hook, error, revset, cmdutil
 import match as matchmod
 import merge as mergemod
 import tags as tagsmod
@@ -159,7 +159,7 @@
         return self._repo.lock()
 
     def addchangegroup(self, cg, source, url):
-        return changegroup.addchangegroup(self._repo, cg, source, url)
+        return cg.apply(self._repo, source, url)
 
     def pushkey(self, namespace, key, old, new):
         return self._repo.pushkey(namespace, key, old, new)
@@ -300,6 +300,7 @@
         if create:
             self._writerequirements()
 
+        self._dirstatevalidatewarned = False
 
         self._branchcaches = {}
         self._revbranchcache = None
@@ -354,6 +355,10 @@
         manifestcachesize = self.ui.configint('format', 'manifestcachesize')
         if manifestcachesize is not None:
             self.svfs.options['manifestcachesize'] = manifestcachesize
+        # experimental config: format.aggressivemergedeltas
+        aggressivemergedeltas = self.ui.configbool('format',
+            'aggressivemergedeltas', False)
+        self.svfs.options['aggressivemergedeltas'] = aggressivemergedeltas
 
     def _writerequirements(self):
         scmutil.writerequires(self.vfs, self.requirements)
@@ -472,19 +477,19 @@
 
     @repofilecache('dirstate')
     def dirstate(self):
-        warned = [0]
-        def validate(node):
-            try:
-                self.changelog.rev(node)
-                return node
-            except error.LookupError:
-                if not warned[0]:
-                    warned[0] = True
-                    self.ui.warn(_("warning: ignoring unknown"
-                                   " working parent %s!\n") % short(node))
-                return nullid
+        return dirstate.dirstate(self.vfs, self.ui, self.root,
+                                 self._dirstatevalidate)
 
-        return dirstate.dirstate(self.vfs, self.ui, self.root, validate)
+    def _dirstatevalidate(self, node):
+        try:
+            self.changelog.rev(node)
+            return node
+        except error.LookupError:
+            if not self._dirstatevalidatewarned:
+                self._dirstatevalidatewarned = True
+                self.ui.warn(_("warning: ignoring unknown"
+                               " working parent %s!\n") % short(node))
+            return nullid
 
     def __getitem__(self, changeid):
         if changeid is None or changeid == wdirrev:
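
The dirstate hunk above trades a per-call closure (with the `warned = [0]` mutable-cell trick, needed because Python 2 lacks `nonlocal`) for an ordinary method guarded by an instance flag. A minimal sketch of the two shapes, using a hypothetical class:

class repoish(object):
    def __init__(self):
        self._warned = False

    def make_validator(self):
        warned = [0]                 # mutable cell: py2 closures cannot rebind
        def validate(node):
            if not warned[0]:
                warned[0] = True
                print('unknown parent %r' % node)
            return node
        return validate

    def validate(self, node):        # method form: the flag lives on self
        if not self._warned:
            self._warned = True
            print('unknown parent %r' % node)
        return node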
@@ -538,7 +543,7 @@
         return hook.hook(self.ui, self, name, throw, **args)
 
     @unfilteredmethod
-    def _tag(self, names, node, message, local, user, date, extra={},
+    def _tag(self, names, node, message, local, user, date, extra=None,
              editor=False):
         if isinstance(names, str):
             names = (names,)
@@ -635,7 +640,7 @@
         if not local:
             m = matchmod.exact(self.root, '', ['.hgtags'])
             if any(self.status(match=m, unknown=True, ignored=True)):
-                raise util.Abort(_('working copy of .hgtags is changed'),
+                raise error.Abort(_('working copy of .hgtags is changed'),
                                  hint=_('please commit .hgtags manually'))
 
         self.tags() # instantiate the cache
@@ -972,7 +977,7 @@
                 hint=_("run 'hg recover' to clean up transaction"))
 
         # make journal.dirstate contain in-memory changes at this point
-        self.dirstate.write()
+        self.dirstate.write(None)
 
         idbase = "%.40f#%f" % (random.random(), time.time())
         txnid = 'TXN:' + util.sha1(idbase).hexdigest()
@@ -989,16 +994,33 @@
         reporef = weakref.ref(self)
         def validate(tr):
             """will run pre-closing hooks"""
-            pending = lambda: tr.writepending() and self.root or ""
-            reporef().hook('pretxnclose', throw=True, pending=pending,
+            reporef().hook('pretxnclose', throw=True,
                            txnname=desc, **tr.hookargs)
+        def releasefn(tr, success):
+            repo = reporef()
+            if success:
+                # this should be invoked explicitly here, because
+                # in-memory changes aren't written out when the
+                # transaction is closed if tr.addfilegenerator (via
+                # dirstate.write or so) wasn't invoked while the
+                # transaction was running
+                repo.dirstate.write(None)
+            else:
+                # prevent in-memory changes from being written out at
+                # the end of outer wlock scope or so
+                repo.dirstate.invalidate()
+
+                # discard all changes (including ones already written
+                # out) in this transaction
+                repo.vfs.rename('journal.dirstate', 'dirstate')
 
         tr = transaction.transaction(rp, self.svfs, vfsmap,
                                      "journal",
                                      "undo",
                                      aftertrans(renames),
                                      self.store.createmode,
-                                     validator=validate)
+                                     validator=validate,
+                                     releasefn=releasefn)
 
         tr.hookargs['txnid'] = txnid
         # note: writing the fncache only during finalize mean that the file is
@@ -1019,6 +1041,9 @@
             reporef().hook('txnabort', throw=False, txnname=desc,
                            **tr2.hookargs)
         tr.addabort('txnabort-hook', txnaborthook)
+        # avoid eager cache invalidation. in-memory data should be identical
+        # to stored data if the transaction has no error.
+        tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats)
         self._transref = weakref.ref(tr)
         return tr
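
A rough sketch of the callback contract being wired up here: the validator can veto the close, while releasefn is told whether the transaction succeeded so the caller can flush or restore side state such as the dirstate (hypothetical transactionish class, not Mercurial's transaction API):

class transactionish(object):
    def __init__(self, validator=None, releasefn=None):
        self._validator = validator
        self._releasefn = releasefn
        self._success = False

    def close(self):
        if self._validator:
            self._validator(self)          # may raise to abort the close
        self._success = True

    def release(self):
        if self._releasefn:
            self._releasefn(self, self._success)

# usage: releasefn(tr, True) writes pending state out;
# releasefn(tr, False) throws it away and restores the journalled copy.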
 
@@ -1063,20 +1088,22 @@
             lock.release()
 
     def rollback(self, dryrun=False, force=False):
-        wlock = lock = None
+        wlock = lock = dsguard = None
         try:
             wlock = self.wlock()
             lock = self.lock()
             if self.svfs.exists("undo"):
-                return self._rollback(dryrun, force)
+                dsguard = cmdutil.dirstateguard(self, 'rollback')
+
+                return self._rollback(dryrun, force, dsguard)
             else:
                 self.ui.warn(_("no rollback information available\n"))
                 return 1
         finally:
-            release(lock, wlock)
+            release(dsguard, lock, wlock)
 
     @unfilteredmethod # Until we get smarter cache management
-    def _rollback(self, dryrun, force):
+    def _rollback(self, dryrun, force, dsguard):
         ui = self.ui
         try:
             args = self.vfs.read('undo.desc').splitlines()
@@ -1098,7 +1125,7 @@
             desc = None
 
         if not force and self['.'] != self['tip'] and desc == 'commit':
-            raise util.Abort(
+            raise error.Abort(
                 _('rollback of last commit while not checked out '
                   'may lose data'), hint=_('use -f to force'))
 
@@ -1119,6 +1146,9 @@
         parentgone = (parents[0] not in self.changelog.nodemap or
                       parents[1] not in self.changelog.nodemap)
         if parentgone:
+            # prevent dirstateguard from overwriting the already restored dirstate
+            dsguard.close()
+
             self.vfs.rename('undo.dirstate', 'dirstate')
             try:
                 branch = self.vfs.read('undo.branch')
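
cmdutil.dirstateguard (used above) follows a save/close/release pattern; a generic sketch of that pattern with an in-memory stand-in for the vfs (all names illustrative):

class fileguard(object):
    """Back up a value on entry; restore it on release unless close() ran."""

    def __init__(self, store, key):
        self.store = store
        self.key = key
        self.saved = store[key]      # take the backup up front
        self.active = True

    def close(self):                 # success path: keep the new value
        self.active = False

    def release(self):               # failure path: roll back to the backup
        if self.active:
            self.store[self.key] = self.saved
            self.active = False

store = {'dirstate': 'old'}
guard = fileguard(store, 'dirstate')
store['dirstate'] = 'new'
guard.release()                      # close() was never called -> restore
assert store['dirstate'] == 'old'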
@@ -1196,9 +1226,25 @@
         self.invalidate()
         self.invalidatedirstate()
 
-    def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc):
+    def _refreshfilecachestats(self, tr):
+        """Reload stats of cached files so that they are flagged as valid"""
+        for k, ce in self._filecache.items():
+            if k == 'dirstate' or k not in self.__dict__:
+                continue
+            ce.refresh()
+
+    def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc,
+              inheritchecker=None, parentenvvar=None):
+        parentlock = None
+        # the contents of parentenvvar are used by the underlying lock to
+        # determine whether it can be inherited
+        if parentenvvar is not None:
+            parentlock = os.environ.get(parentenvvar)
         try:
-            l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
+            l = lockmod.lock(vfs, lockname, 0, releasefn=releasefn,
+                             acquirefn=acquirefn, desc=desc,
+                             inheritchecker=inheritchecker,
+                             parentlock=parentlock)
         except error.LockHeld as inst:
             if not wait:
                 raise
@@ -1207,10 +1253,9 @@
             # default to 600 seconds timeout
             l = lockmod.lock(vfs, lockname,
                              int(self.ui.config("ui", "timeout", "600")),
-                             releasefn, desc=desc)
+                             releasefn=releasefn, acquirefn=acquirefn,
+                             desc=desc)
             self.ui.warn(_("got lock after %s seconds\n") % l.delay)
-        if acquirefn:
-            acquirefn()
         return l
 
     def _afterlock(self, callback):
@@ -1238,17 +1283,16 @@
             l.lock()
             return l
 
-        def unlock():
-            for k, ce in self._filecache.items():
-                if k == 'dirstate' or k not in self.__dict__:
-                    continue
-                ce.refresh()
-
-        l = self._lock(self.svfs, "lock", wait, unlock,
+        l = self._lock(self.svfs, "lock", wait, None,
                        self.invalidate, _('repository %s') % self.origroot)
         self._lockref = weakref.ref(l)
         return l
 
+    def _wlockchecktransaction(self):
+        if self.currenttransaction() is not None:
+            raise error.LockInheritanceContractViolation(
+                'wlock cannot be inherited in the middle of a transaction')
+
     def wlock(self, wait=True):
         '''Lock the non-store parts of the repository (everything under
         .hg except .hg/store) and return a weak reference to the lock.
@@ -1262,7 +1306,7 @@
             l.lock()
             return l
 
-        # We do not need to check for non-waiting lock aquisition.  Such
+        # We do not need to check for non-waiting lock acquisition.  Such
         # acquisition would not cause dead-lock as they would just fail.
         if wait and (self.ui.configbool('devel', 'all-warnings')
                      or self.ui.configbool('devel', 'check-locks')):
@@ -1274,16 +1318,31 @@
             if self.dirstate.pendingparentchange():
                 self.dirstate.invalidate()
             else:
-                self.dirstate.write()
+                self.dirstate.write(None)
 
             self._filecache['dirstate'].refresh()
 
         l = self._lock(self.vfs, "wlock", wait, unlock,
                        self.invalidatedirstate, _('working directory of %s') %
-                       self.origroot)
+                       self.origroot,
+                       inheritchecker=self._wlockchecktransaction,
+                       parentenvvar='HG_WLOCK_LOCKER')
         self._wlockref = weakref.ref(l)
         return l
 
+    def _currentlock(self, lockref):
+        """Returns the lock if it's held, or None if it's not."""
+        if lockref is None:
+            return None
+        l = lockref()
+        if l is None or not l.held:
+            return None
+        return l
+
+    def currentwlock(self):
+        """Returns the wlock if it's held, or None if it's not."""
+        return self._currentlock(self._wlockref)
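
lock() and wlock() above cache the live lock only through a weak reference, so the caller's strong references control its lifetime, and _currentlock has to cope with a dead or unheld reference. A compact sketch of that caching, assuming a hypothetical makelock callable:

import weakref

class lockcache(object):
    def __init__(self, makelock):
        self._makelock = makelock
        self._ref = None

    def current(self):
        if self._ref is None:
            return None
        l = self._ref()              # None once the lock was collected
        if l is None or not getattr(l, 'held', 0):
            return None
        return l

    def acquire(self):
        l = self.current()
        if l is not None:
            l.lock()                 # re-entrant: bump the hold count
            return l
        l = self._makelock()
        self._ref = weakref.ref(l)   # weak, so callers control the lifetime
        return l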
+
     def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
         """
         commit an individual file as part of a larger transaction
@@ -1372,16 +1431,18 @@
 
     @unfilteredmethod
     def commit(self, text="", user=None, date=None, match=None, force=False,
-               editor=False, extra={}):
+               editor=False, extra=None):
         """Add a new revision to current repository.
 
         Revision information is gathered from the working directory,
         match can be used to filter the committed files. If editor is
         supplied, it is called to get a commit message.
         """
+        if extra is None:
+            extra = {}
 
         def fail(f, msg):
-            raise util.Abort('%s: %s' % (f, msg))
+            raise error.Abort('%s: %s' % (f, msg))
 
         if not match:
             match = matchmod.always(self.root, '')
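
The extra={} to extra=None change above avoids Python's shared-mutable-default pitfall; a small self-contained demonstration:

def bad(extra={}):                   # one dict, shared by every call
    extra['n'] = extra.get('n', 0) + 1
    return extra

def good(extra=None):
    if extra is None:
        extra = {}                   # fresh dict per call
    extra['n'] = extra.get('n', 0) + 1
    return extra

assert bad()['n'] == 1 and bad()['n'] == 2    # state leaks between calls
assert good()['n'] == 1 and good()['n'] == 1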
@@ -1397,7 +1458,7 @@
             merge = len(wctx.parents()) > 1
 
             if not force and merge and match.ispartial():
-                raise util.Abort(_('cannot partially commit a merge '
+                raise error.Abort(_('cannot partially commit a merge '
                                    '(do not specify files or patterns)'))
 
             status = self.status(match=match, clean=force)
@@ -1425,12 +1486,12 @@
                             newstate[s] = oldstate[s]
                             continue
                         if not force:
-                            raise util.Abort(
+                            raise error.Abort(
                                 _("commit with new subrepo %s excluded") % s)
                     dirtyreason = wctx.sub(s).dirtyreason(True)
                     if dirtyreason:
                         if not self.ui.configbool('ui', 'commitsubrepos'):
-                            raise util.Abort(dirtyreason,
+                            raise error.Abort(dirtyreason,
                                 hint=_("use --subrepos for recursive commit"))
                         subs.append(s)
                         commitsubs.add(s)
@@ -1447,7 +1508,7 @@
                 if subs:
                     if (not match('.hgsub') and
                         '.hgsub' in (wctx.modified() + wctx.added())):
-                        raise util.Abort(
+                        raise error.Abort(
                             _("can't commit subrepos without .hgsub"))
                     status.modified.insert(0, '.hgsubstate')
 
@@ -1489,13 +1550,23 @@
                 return None
 
             if merge and cctx.deleted():
-                raise util.Abort(_("cannot commit merge with missing files"))
+                raise error.Abort(_("cannot commit merge with missing files"))
 
+            unresolved, driverresolved = False, False
             ms = mergemod.mergestate(self)
             for f in status.modified:
-                if f in ms and ms[f] == 'u':
-                    raise util.Abort(_('unresolved merge conflicts '
-                                       '(see "hg help resolve")'))
+                if f in ms:
+                    if ms[f] == 'u':
+                        unresolved = True
+                    elif ms[f] == 'd':
+                        driverresolved = True
+
+            if unresolved:
+                raise error.Abort(_('unresolved merge conflicts '
+                                    '(see "hg help resolve")'))
+            if driverresolved or ms.mdstate() != 's':
+                raise error.Abort(_('driver-resolved merge conflicts'),
+                                  hint=_('run "hg resolve --all" to resolve'))
 
             if editor:
                 cctx._text = editor(self, cctx, subs)
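
The new commit-time check walks the merge state and distinguishes plain unresolved files ('u') from driver-resolved ones ('d'); schematically, with a plain dict standing in for the mergestate object:

def classify(states, modified):
    unresolved = driverresolved = False
    for f in modified:
        if f in states:
            if states[f] == 'u':
                unresolved = True
            elif states[f] == 'd':
                driverresolved = True
    return unresolved, driverresolved

assert classify({'a': 'u', 'b': 'd', 'c': 'r'}, ['a', 'b', 'c']) == (True, True)
assert classify({'c': 'r'}, ['c']) == (False, False)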
@@ -1610,10 +1681,9 @@
             n = self.changelog.add(mn, files, ctx.description(),
                                    trp, p1.node(), p2.node(),
                                    user, ctx.date(), ctx.extra().copy())
-            p = lambda: tr.writepending() and self.root or ""
             xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
             self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
-                      parent2=xp2, pending=p)
+                      parent2=xp2)
             # set the new commit is proper phase
             targetphase = subrepo.newcommitphase(self.ui, ctx)
             if targetphase:
@@ -1771,121 +1841,21 @@
         """
         return util.hooks()
 
-    def stream_in(self, remote, remotereqs):
-        # Save remote branchmap. We will use it later
-        # to speed up branchcache creation
-        rbranchmap = None
-        if remote.capable("branchmap"):
-            rbranchmap = remote.branchmap()
-
-        fp = remote.stream_out()
-        l = fp.readline()
-        try:
-            resp = int(l)
-        except ValueError:
-            raise error.ResponseError(
-                _('unexpected response from remote server:'), l)
-        if resp == 1:
-            raise util.Abort(_('operation forbidden by server'))
-        elif resp == 2:
-            raise util.Abort(_('locking the remote repository failed'))
-        elif resp != 0:
-            raise util.Abort(_('the server sent an unknown error code'))
-
-        self.applystreamclone(remotereqs, rbranchmap, fp)
-        return len(self.heads()) + 1
-
-    def applystreamclone(self, remotereqs, remotebranchmap, fp):
-        """Apply stream clone data to this repository.
-
-        "remotereqs" is a set of requirements to handle the incoming data.
-        "remotebranchmap" is the result of a branchmap lookup on the remote. It
-        can be None.
-        "fp" is a file object containing the raw stream data, suitable for
-        feeding into exchange.consumestreamclone.
-        """
-        lock = self.lock()
-        try:
-            exchange.consumestreamclone(self, fp)
-
-            # new requirements = old non-format requirements +
-            #                    new format-related remote requirements
-            # requirements from the streamed-in repository
-            self.requirements = remotereqs | (
-                    self.requirements - self.supportedformats)
-            self._applyopenerreqs()
-            self._writerequirements()
-
-            if remotebranchmap:
-                rbheads = []
-                closed = []
-                for bheads in remotebranchmap.itervalues():
-                    rbheads.extend(bheads)
-                    for h in bheads:
-                        r = self.changelog.rev(h)
-                        b, c = self.changelog.branchinfo(r)
-                        if c:
-                            closed.append(h)
-
-                if rbheads:
-                    rtiprev = max((int(self.changelog.rev(node))
-                            for node in rbheads))
-                    cache = branchmap.branchcache(remotebranchmap,
-                                                  self[rtiprev].node(),
-                                                  rtiprev,
-                                                  closednodes=closed)
-                    # Try to stick it as low as possible
-                    # filter above served are unlikely to be fetch from a clone
-                    for candidate in ('base', 'immutable', 'served'):
-                        rview = self.filtered(candidate)
-                        if cache.validfor(rview):
-                            self._branchcaches[candidate] = cache
-                            cache.write(rview)
-                            break
-            self.invalidate()
-        finally:
-            lock.release()
-
     def clone(self, remote, heads=[], stream=None):
         '''clone remote repository.
 
         keyword arguments:
         heads: list of revs to clone (forces use of pull)
         stream: use streaming clone if possible'''
-
-        # now, all clients that can request uncompressed clones can
-        # read repo formats supported by all servers that can serve
-        # them.
-
-        # if revlog format changes, client will have to check version
-        # and format flags on "stream" capability, and use
-        # uncompressed only if compatible.
-
-        if stream is None:
-            # if the server explicitly prefers to stream (for fast LANs)
-            stream = remote.capable('stream-preferred')
-
-        if stream and not heads:
-            # 'stream' means remote revlog format is revlogv1 only
-            if remote.capable('stream'):
-                self.stream_in(remote, set(('revlogv1',)))
-            else:
-                # otherwise, 'streamreqs' contains the remote revlog format
-                streamreqs = remote.capable('streamreqs')
-                if streamreqs:
-                    streamreqs = set(streamreqs.split(','))
-                    # if we support it, stream in and adjust our requirements
-                    if not streamreqs - self.supportedformats:
-                        self.stream_in(remote, streamreqs)
-
         # internal config: ui.quietbookmarkmove
         quiet = self.ui.backupconfig('ui', 'quietbookmarkmove')
         try:
             self.ui.setconfig('ui', 'quietbookmarkmove', True, 'clone')
-            ret = exchange.pull(self, remote, heads).cgresult
+            pullop = exchange.pull(self, remote, heads,
+                                   streamclonerequested=stream)
+            return pullop.cgresult
         finally:
             self.ui.restoreconfig(quiet)
-        return ret
 
     def pushkey(self, namespace, key, old, new):
         try:
@@ -1893,8 +1863,6 @@
             hookargs = {}
             if tr is not None:
                 hookargs.update(tr.hookargs)
-                pending = lambda: tr.writepending() and self.root or ""
-                hookargs['pending'] = pending
             hookargs['namespace'] = namespace
             hookargs['key'] = key
             hookargs['old'] = old
--- a/mercurial/lock.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/lock.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,10 +5,20 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import util, error
-import errno, os, socket, time
+from __future__ import absolute_import
+
+import contextlib
+import errno
+import os
+import socket
+import time
 import warnings
 
+from . import (
+    error,
+    util,
+)
+
 class lock(object):
     '''An advisory lock held by one process to control access to a set
     of files.  Non-cooperating processes or incorrectly written scripts
@@ -29,16 +39,24 @@
 
     _host = None
 
-    def __init__(self, vfs, file, timeout=-1, releasefn=None, desc=None):
+    def __init__(self, vfs, file, timeout=-1, releasefn=None, acquirefn=None,
+                 desc=None, inheritchecker=None, parentlock=None):
         self.vfs = vfs
         self.f = file
         self.held = 0
         self.timeout = timeout
         self.releasefn = releasefn
+        self.acquirefn = acquirefn
         self.desc = desc
+        self._inheritchecker = inheritchecker
+        self.parentlock = parentlock
+        self._parentheld = False
+        self._inherited = False
         self.postrelease  = []
-        self.pid = os.getpid()
+        self.pid = self._getpid()
         self.delay = self.lock()
+        if self.acquirefn:
+            self.acquirefn()
 
     def __del__(self):
         if self.held:
@@ -52,11 +70,15 @@
 
         self.release()
 
+    def _getpid(self):
+        # wrapper around os.getpid() to make testing easier
+        return os.getpid()
+
     def lock(self):
         timeout = self.timeout
         while True:
             try:
-                self.trylock()
+                self._trylock()
                 return self.timeout - timeout
             except error.LockHeld as inst:
                 if timeout != 0:
@@ -67,20 +89,31 @@
                 raise error.LockHeld(errno.ETIMEDOUT, inst.filename, self.desc,
                                      inst.locker)
 
-    def trylock(self):
+    def _trylock(self):
         if self.held:
             self.held += 1
             return
         if lock._host is None:
             lock._host = socket.gethostname()
         lockname = '%s:%s' % (lock._host, self.pid)
-        while not self.held:
+        retry = 5
+        while not self.held and retry:
+            retry -= 1
             try:
                 self.vfs.makelock(lockname, self.f)
                 self.held = 1
             except (OSError, IOError) as why:
                 if why.errno == errno.EEXIST:
-                    locker = self.testlock()
+                    locker = self._readlock()
+                    # special case where a parent process holds the lock -- this
+                    # is different from the pid being different because we do
+                    # want the unlock and postrelease functions to be called,
+                    # but the lockfile should not be removed.
+                    if locker == self.parentlock:
+                        self._parentheld = True
+                        self.held = 1
+                        return
+                    locker = self._testlock(locker)
                     if locker is not None:
                         raise error.LockHeld(errno.EAGAIN,
                                              self.vfs.join(self.f), self.desc,
@@ -89,23 +122,22 @@
                     raise error.LockUnavailable(why.errno, why.strerror,
                                                 why.filename, self.desc)
 
-    def testlock(self):
-        """return id of locker if lock is valid, else None.
+    def _readlock(self):
+        """read lock and return its value
 
-        If old-style lock, we cannot tell what machine locker is on.
-        with new-style lock, if locker is on this machine, we can
-        see if locker is alive.  If locker is on this machine but
-        not alive, we can safely break lock.
-
-        The lock file is only deleted when None is returned.
-
+        Returns None if no lock exists, pid for old-style locks, and host:pid
+        for new-style locks.
         """
         try:
-            locker = self.vfs.readlock(self.f)
+            return self.vfs.readlock(self.f)
         except (OSError, IOError) as why:
             if why.errno == errno.ENOENT:
                 return None
             raise
+
+    def _testlock(self, locker):
+        if locker is None:
+            return None
         try:
             host, pid = locker.split(":", 1)
         except ValueError:
@@ -127,6 +159,50 @@
         except error.LockError:
             return locker
 
+    def testlock(self):
+        """return id of locker if lock is valid, else None.
+
+        If old-style lock, we cannot tell what machine locker is on.
+        with new-style lock, if locker is on this machine, we can
+        see if locker is alive.  If locker is on this machine but
+        not alive, we can safely break lock.
+
+        The lock file is only deleted when None is returned.
+
+        """
+        locker = self._readlock()
+        return self._testlock(locker)
+
+    @contextlib.contextmanager
+    def inherit(self):
+        """context for the lock to be inherited by a Mercurial subprocess.
+
+        Yields a string that will be recognized by the lock in the subprocess.
+        Communicating this string to the subprocess needs to be done separately
+        -- typically by an environment variable.
+        """
+        if not self.held:
+            raise error.LockInheritanceContractViolation(
+                'inherit can only be called while lock is held')
+        if self._inherited:
+            raise error.LockInheritanceContractViolation(
+                'inherit cannot be called while lock is already inherited')
+        if self._inheritchecker is not None:
+            self._inheritchecker()
+        if self.releasefn:
+            self.releasefn()
+        if self._parentheld:
+            lockname = self.parentlock
+        else:
+            lockname = '%s:%s' % (lock._host, self.pid)
+        self._inherited = True
+        try:
+            yield lockname
+        finally:
+            if self.acquirefn:
+                self.acquirefn()
+            self._inherited = False
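
Putting the pieces together, a parent process would use inherit() roughly like this, exporting the yielded token through the environment variable that localrepo passes as parentenvvar (the subprocess command itself is illustrative, not actual Mercurial calling code):

import os
import subprocess

def run_with_inherited_wlock(repo, cmd):
    wlock = repo.wlock()
    try:
        with wlock.inherit() as token:
            env = dict(os.environ)
            env['HG_WLOCK_LOCKER'] = token   # the child sees this as parentlock
            subprocess.check_call(cmd, env=env)
    finally:
        wlock.release()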
+
     def release(self):
         """release the lock and execute callback function if any
 
@@ -136,19 +212,23 @@
             self.held -= 1
         elif self.held == 1:
             self.held = 0
-            if os.getpid() != self.pid:
+            if self._getpid() != self.pid:
                 # we forked, and are not the parent
                 return
             try:
                 if self.releasefn:
                     self.releasefn()
             finally:
-                try:
-                    self.vfs.unlink(self.f)
-                except OSError:
-                    pass
-            for callback in self.postrelease:
-                callback()
+                if not self._parentheld:
+                    try:
+                        self.vfs.unlink(self.f)
+                    except OSError:
+                        pass
+            # The postrelease functions typically assume the lock is not held
+            # at all.
+            if not self._parentheld:
+                for callback in self.postrelease:
+                    callback()
 
 def release(*locks):
     for lock in locks:
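
The _trylock changes earlier in this file bound the acquisition loop and recognize a parent-held lock by its contents; a simplified sketch of that logic, with hypothetical makelock/readlock callables and exception type:

import errno

class LockHeldError(Exception):
    pass

def trylock(makelock, readlock, parentlock, retries=5):
    for _attempt in range(retries):
        try:
            makelock()                       # atomically create the lock file
            return 'acquired'
        except OSError as why:
            if why.errno != errno.EEXIST:
                raise
            locker = readlock()
            if locker == parentlock:
                return 'inherited'           # parent holds it; don't break it
            if locker is not None:
                raise LockHeldError(locker)
            # lock file vanished (or was broken as stale); retry
    raise LockHeldError('still contended after %d attempts' % retries)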
--- a/mercurial/mail.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/mail.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,10 +5,23 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from i18n import _
-import util, encoding, sslutil
-import os, smtplib, socket, quopri, time, sys
+from __future__ import absolute_import
+
 import email
+import os
+import quopri
+import smtplib
+import socket
+import sys
+import time
+
+from .i18n import _
+from . import (
+    encoding,
+    error,
+    sslutil,
+    util,
+)
 
 _oldheaderinit = email.Header.Header.__init__
 def _unifiedheaderinit(self, *args, **kw):
@@ -19,10 +32,10 @@
     constructor, and 2.7 removed this parameter.
 
     Default argument is continuation_ws=' ', which means that the
-    behaviour is different in <2.7 and 2.7
+    behavior is different in <2.7 and 2.7
 
-    We consider the 2.7 behaviour to be preferable, but need
-    to have an unified behaviour for versions 2.4 to 2.7
+    We consider the 2.7 behavior to be preferable, but need
+    to have a unified behavior for versions 2.4 to 2.7
     """
     # override continuation_ws
     kw['continuation_ws'] = ' '
@@ -47,9 +60,6 @@
         if resp == 220:
             self.sock = sslutil.wrapsocket(self.sock, keyfile, certfile,
                                            **self._sslkwargs)
-            if not util.safehasattr(self.sock, "read"):
-                # using httplib.FakeSocket with Python 2.5.x or earlier
-                self.sock.read = self.sock.recv
             self.file = smtplib.SSLFakeFile(self.sock)
             self.helo_resp = None
             self.ehlo_resp = None
@@ -57,31 +67,27 @@
             self.does_esmtp = 0
         return (resp, reply)
 
-if util.safehasattr(smtplib.SMTP, '_get_socket'):
-    class SMTPS(smtplib.SMTP):
-        '''Derived class to verify the peer certificate for SMTPS.
-
-        This class allows to pass any keyword arguments to SSL socket creation.
-        '''
-        def __init__(self, sslkwargs, keyfile=None, certfile=None, **kwargs):
-            self.keyfile = keyfile
-            self.certfile = certfile
-            smtplib.SMTP.__init__(self, **kwargs)
-            self.default_port = smtplib.SMTP_SSL_PORT
-            self._sslkwargs = sslkwargs
+class SMTPS(smtplib.SMTP):
+    '''Derived class to verify the peer certificate for SMTPS.
 
-        def _get_socket(self, host, port, timeout):
-            if self.debuglevel > 0:
-                print >> sys.stderr, 'connect:', (host, port)
-            new_socket = socket.create_connection((host, port), timeout)
-            new_socket = sslutil.wrapsocket(new_socket,
-                                            self.keyfile, self.certfile,
-                                            **self._sslkwargs)
-            self.file = smtplib.SSLFakeFile(new_socket)
-            return new_socket
-else:
-    def SMTPS(sslkwargs, keyfile=None, certfile=None, **kwargs):
-        raise util.Abort(_('SMTPS requires Python 2.6 or later'))
+    This class allows passing any keyword arguments to SSL socket creation.
+    '''
+    def __init__(self, sslkwargs, keyfile=None, certfile=None, **kwargs):
+        self.keyfile = keyfile
+        self.certfile = certfile
+        smtplib.SMTP.__init__(self, **kwargs)
+        self.default_port = smtplib.SMTP_SSL_PORT
+        self._sslkwargs = sslkwargs
+
+    def _get_socket(self, host, port, timeout):
+        if self.debuglevel > 0:
+            print >> sys.stderr, 'connect:', (host, port)
+        new_socket = socket.create_connection((host, port), timeout)
+        new_socket = sslutil.wrapsocket(new_socket,
+                                        self.keyfile, self.certfile,
+                                        **self._sslkwargs)
+        self.file = smtplib.SSLFakeFile(new_socket)
+        return new_socket
 
 def _smtp(ui):
     '''build an smtp connection and return a function to send mail'''
@@ -91,14 +97,14 @@
     starttls = tls == 'starttls' or util.parsebool(tls)
     smtps = tls == 'smtps'
     if (starttls or smtps) and not util.safehasattr(socket, 'ssl'):
-        raise util.Abort(_("can't use TLS: Python SSL support not installed"))
+        raise error.Abort(_("can't use TLS: Python SSL support not installed"))
     mailhost = ui.config('smtp', 'host')
     if not mailhost:
-        raise util.Abort(_('smtp.host not configured - cannot send mail'))
+        raise error.Abort(_('smtp.host not configured - cannot send mail'))
     verifycert = ui.config('smtp', 'verifycert', 'strict')
     if verifycert not in ['strict', 'loose']:
         if util.parsebool(verifycert) is not False:
-            raise util.Abort(_('invalid smtp.verifycert configuration: %s')
+            raise error.Abort(_('invalid smtp.verifycert configuration: %s')
                              % (verifycert))
         verifycert = False
     if (starttls or smtps) and verifycert:
@@ -118,7 +124,7 @@
     else:
         defaultport = 25
     mailport = util.getport(ui.config('smtp', 'port', defaultport))
-    ui.note(_('sending mail: smtp host %s, port %s\n') %
+    ui.note(_('sending mail: smtp host %s, port %d\n') %
             (mailhost, mailport))
     s.connect(host=mailhost, port=mailport)
     if starttls:
@@ -139,16 +145,16 @@
         try:
             s.login(username, password)
         except smtplib.SMTPException as inst:
-            raise util.Abort(inst)
+            raise error.Abort(inst)
 
     def send(sender, recipients, msg):
         try:
             return s.sendmail(sender, recipients, msg)
         except smtplib.SMTPRecipientsRefused as inst:
             recipients = [r[1] for r in inst.recipients.values()]
-            raise util.Abort('\n' + '\n'.join(recipients))
+            raise error.Abort('\n' + '\n'.join(recipients))
         except smtplib.SMTPException as inst:
-            raise util.Abort(inst)
+            raise error.Abort(inst)
 
     return send
 
@@ -162,7 +168,7 @@
     fp.write(msg)
     ret = fp.close()
     if ret:
-        raise util.Abort('%s %s' % (
+        raise error.Abort('%s %s' % (
             os.path.basename(program.split(None, 1)[0]),
             util.explainexit(ret)[0]))
 
@@ -196,11 +202,11 @@
     method = ui.config('email', 'method', 'smtp')
     if method == 'smtp':
         if not ui.config('smtp', 'host'):
-            raise util.Abort(_('smtp specified as email transport, '
+            raise error.Abort(_('smtp specified as email transport, '
                                'but no smtp host configured'))
     else:
         if not util.findexe(method):
-            raise util.Abort(_('%r specified as email transport, '
+            raise error.Abort(_('%r specified as email transport, '
                                'but not in PATH') % method)
 
 def mimetextpatch(s, subtype='plain', display=False):
@@ -290,13 +296,13 @@
         dom = dom.decode(encoding.encoding).encode('idna')
         addr = '%s@%s' % (acc, dom)
     except UnicodeDecodeError:
-        raise util.Abort(_('invalid email address: %s') % addr)
+        raise error.Abort(_('invalid email address: %s') % addr)
     except ValueError:
         try:
             # too strict?
             addr = addr.encode('ascii')
         except UnicodeDecodeError:
-            raise util.Abort(_('invalid local address: %s') % addr)
+            raise error.Abort(_('invalid local address: %s') % addr)
     return email.Utils.formataddr((name, addr))
 
 def addressencode(ui, address, charsets=None, display=False):
--- a/mercurial/manifest.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/manifest.py	Tue Oct 20 15:59:10 2015 -0500
@@ -9,6 +9,7 @@
 import mdiff, parsers, error, revlog, util
 import array, struct
 import os
+import heapq
 
 propertycache = util.propertycache
 
@@ -441,13 +442,14 @@
     else:
         return '', f
 
-_noop = lambda: None
+_noop = lambda s: None
 
 class treemanifest(object):
     def __init__(self, dir='', text=''):
         self._dir = dir
         self._node = revlog.nullid
-        self._load = _noop
+        self._loadfunc = _noop
+        self._copyfunc = _noop
         self._dirty = False
         self._dirs = {}
         # Using _lazymanifest here is a little slower than plain old dicts
@@ -475,11 +477,11 @@
         return (not self._files and (not self._dirs or
                 all(m._isempty() for m in self._dirs.values())))
 
-    def __str__(self):
-        return ('<treemanifest dir=%s, node=%s, loaded=%s, dirty=%s>' %
+    def __repr__(self):
+        return ('<treemanifest dir=%s, node=%s, loaded=%s, dirty=%s at 0x%x>' %
                 (self._dir, revlog.hex(self._node),
-                 bool(self._load is _noop),
-                 self._dirty))
+                 bool(self._loadfunc is _noop),
+                 self._dirty, id(self)))
 
     def dir(self):
         '''The directory that this tree manifest represents, including a
@@ -597,6 +599,14 @@
             self._files[f] = n[:21] # to match manifestdict's behavior
         self._dirty = True
 
+    def _load(self):
+        if self._loadfunc is not _noop:
+            lf, self._loadfunc = self._loadfunc, _noop
+            lf(self)
+        elif self._copyfunc is not _noop:
+            cf, self._copyfunc = self._copyfunc, _noop
+            cf(self)
+
     def setflag(self, f, flags):
         """Set the flags (symlink, executable) for path f."""
         assert 'd' not in flags
@@ -614,19 +624,19 @@
         copy = treemanifest(self._dir)
         copy._node = self._node
         copy._dirty = self._dirty
-        def _load():
-            self._load()
-            for d in self._dirs:
-                copy._dirs[d] = self._dirs[d].copy()
-            copy._files = dict.copy(self._files)
-            copy._flags = dict.copy(self._flags)
-            copy._load = _noop
-        copy._load = _load
-        if self._load == _noop:
-            # Chaining _load if it's _noop is functionally correct, but the
-            # chain may end up excessively long (stack overflow), and
-            # will prevent garbage collection of 'self'.
-            copy._load()
+        if self._copyfunc is _noop:
+            def _copyfunc(s):
+                self._load()
+                for d in self._dirs:
+                    s._dirs[d] = self._dirs[d].copy()
+                s._files = dict.copy(self._files)
+                s._flags = dict.copy(self._flags)
+            if self._loadfunc is _noop:
+                _copyfunc(copy)
+            else:
+                copy._copyfunc = _copyfunc
+        else:
+            copy._copyfunc = self._copyfunc
         return copy
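
The _loadfunc/_copyfunc split above gives each node at most one pending way to populate itself, either by parsing from storage or by copying from another in-memory node, and _load() runs whichever is pending exactly once. A generic sketch of the idea (toy class, not treemanifest):

_noop = lambda s: None

class lazynode(object):
    def __init__(self):
        self.data = {}
        self._loadfunc = _noop       # populate from storage
        self._copyfunc = _noop       # populate from another in-memory node

    def _load(self):
        if self._loadfunc is not _noop:
            lf, self._loadfunc = self._loadfunc, _noop
            lf(self)
        elif self._copyfunc is not _noop:
            cf, self._copyfunc = self._copyfunc, _noop
            cf(self)

    def copy(self):
        new = lazynode()
        def _copyfunc(s):
            self._load()
            s.data = dict(self.data)
        if self._loadfunc is _noop:
            _copyfunc(new)              # source already loaded: copy eagerly
        else:
            new._copyfunc = _copyfunc   # defer until the copy is first used
        return new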
 
     def filesnotin(self, m2):
@@ -833,13 +843,10 @@
         return _text(sorted(dirs + files), usemanifestv2)
 
     def read(self, gettext, readsubtree):
-        def _load():
-            # Mark as loaded already here, so __setitem__ and setflag() don't
-            # cause infinite loops when they try to load.
-            self._load = _noop
-            self.parse(gettext(), readsubtree)
-            self._dirty = False
-        self._load = _load
+        def _load_for_read(s):
+            s.parse(gettext(), readsubtree)
+            s._dirty = False
+        self._loadfunc = _load_for_read
 
     def writesubtrees(self, m1, m2, writesubtree):
         self._load() # for consistency; should never have any effect here
@@ -970,12 +977,9 @@
             # revlog layer.
 
             _checkforbidden(added)
-            # combine the changed lists into one list for sorting
-            work = [(x, False) for x in added]
-            work.extend((x, True) for x in removed)
-            # this could use heapq.merge() (from Python 2.6+) or equivalent
-            # since the lists are already sorted
-            work.sort()
+            # combine the changed lists into one sorted iterator
+            work = heapq.merge([(x, False) for x in added],
+                               [(x, True) for x in removed])
 
             arraytext, deltatext = m.fastdelta(self._mancache[p1][1], work)
             cachedelta = self.rev(p1), deltatext
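
Since both input lists are already sorted, heapq.merge (used above) yields the combined sorted sequence lazily instead of concatenating and re-sorting; for example:

import heapq

added = [('a', False), ('c', False)]
removed = [('b', True), ('d', True)]
work = heapq.merge(added, removed)
assert list(work) == [('a', False), ('b', True), ('c', False), ('d', True)]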
--- a/mercurial/match.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/match.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,9 +5,18 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import copy, os, re
-import util, pathutil
-from i18n import _
+from __future__ import absolute_import
+
+import copy
+import os
+import re
+
+from .i18n import _
+from . import (
+    error,
+    pathutil,
+    util,
+)
 
 propertycache = util.propertycache
 
@@ -29,7 +38,7 @@
     for kind, pat, source in kindpats:
         if kind == 'set':
             if not ctx:
-                raise util.Abort("fileset expression with no context")
+                raise error.Abort("fileset expression with no context")
             s = ctx.getfileset(pat)
             fset.update(s)
 
@@ -254,7 +263,7 @@
         '''True if the matcher won't always match.
 
         Although it's just the inverse of _always in this implementation,
-        an extenion such as narrowhg might make it return something
+        an extension such as narrowhg might make it return something
         slightly different.'''
         return not self._always
 
@@ -282,7 +291,7 @@
                         files = files.splitlines()
                     files = [f for f in files if f]
                 except EnvironmentError:
-                    raise util.Abort(_("unable to read file list (%s)") % pat)
+                    raise error.Abort(_("unable to read file list (%s)") % pat)
                 for k, p, source in self._normalize(files, default, root, cwd,
                                                     auditor):
                     kindpats.append((k, p, pat))
@@ -294,8 +303,8 @@
                     for k, p, source in self._normalize(includepats, default,
                                                         root, cwd, auditor):
                         kindpats.append((k, p, source or pat))
-                except util.Abort as inst:
-                    raise util.Abort('%s: %s' % (pat, inst[0]))
+                except error.Abort as inst:
+                    raise error.Abort('%s: %s' % (pat, inst[0]))
                 except IOError as inst:
                     if self._warn:
                         self._warn(_("skipping unreadable pattern file "
@@ -579,11 +588,11 @@
                 _rematcher('(?:%s)' % _regex(k, p, globsuffix))
             except re.error:
                 if s:
-                    raise util.Abort(_("%s: invalid pattern (%s): %s") %
+                    raise error.Abort(_("%s: invalid pattern (%s): %s") %
                                      (s, k, p))
                 else:
-                    raise util.Abort(_("invalid pattern (%s): %s") % (k, p))
-        raise util.Abort(_("invalid pattern"))
+                    raise error.Abort(_("invalid pattern (%s): %s") % (k, p))
+        raise error.Abort(_("invalid pattern"))
 
 def _roots(kindpats):
     '''return roots and exact explicitly listed files from patterns
--- a/mercurial/mdiff.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/mdiff.py	Tue Oct 20 15:59:10 2015 -0500
@@ -6,7 +6,7 @@
 # GNU General Public License version 2 or any later version.
 
 from i18n import _
-import bdiff, mpatch, util, base85
+import bdiff, mpatch, util, base85, error
 import re, struct, zlib
 
 def splitnewlines(text):
@@ -59,7 +59,7 @@
         try:
             self.context = int(self.context)
         except ValueError:
-            raise util.Abort(_('diff context lines count must be '
+            raise error.Abort(_('diff context lines count must be '
                                'an integer, not %r') % self.context)
 
     def copy(self, **kwargs):
--- a/mercurial/merge.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/merge.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,13 +5,30 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+from __future__ import absolute_import
+
+import errno
+import os
+import shutil
 import struct
 
-from node import nullid, nullrev, hex, bin
-from i18n import _
-from mercurial import obsolete
-import error as errormod, util, filemerge, copies, subrepo, worker
-import errno, os, shutil
+from .i18n import _
+from .node import (
+    bin,
+    hex,
+    nullid,
+    nullrev,
+)
+from . import (
+    copies,
+    destutil,
+    error,
+    filemerge,
+    obsolete,
+    subrepo,
+    util,
+    worker,
+)
 
 _pack = struct.pack
 _unpack = struct.unpack
@@ -27,7 +44,7 @@
 
     it is stored on disk when needed. Two file are used, one with an old
     format, one with a new format. Both contains similar data, but the new
-    format can store new kind of field.
+    format can store new kinds of fields.
 
     Current new format is a list of arbitrary record of the form:
 
@@ -44,6 +61,16 @@
     L: the node of the "local" part of the merge (hexified version)
     O: the node of the "other" part of the merge (hexified version)
     F: a file to be merged entry
+    D: a file that the external merge driver will merge internally
+       (experimental)
+    m: the external merge driver defined for this merge plus its run state
+       (experimental)
+
+    Merge driver run states (experimental):
+    u: driver-resolved files unmarked -- needs to be run next time we're about
+       to resolve or commit
+    m: driver-resolved files marked -- only needs to be run before commit
+    s: success/skipped -- does not need to be run any more
     '''
     statepathv1 = 'merge/state'
     statepathv2 = 'merge/state2'
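
To make the record format described above concrete, here is a made-up v2 record list for a merge using a driver, together with a parse loop mirroring the record consumption shown below (all values illustrative; real file entries carry more NUL-separated fields than shown):

records = [
    ('L', '1' * 40),                                # local node (made-up hex)
    ('O', '2' * 40),                                # other node (made-up hex)
    ('m', '\0'.join(['python:mydriver.py', 'm'])),  # driver spec + run state
    ('F', '\0'.join(['a.txt', 'u'])),               # unresolved file (fields trimmed)
    ('D', '\0'.join(['b.txt', 'd'])),               # driver-resolved file (fields trimmed)
]

state = {}
for rtype, record in records:
    if rtype == 'm':
        driver, mdstate = record.split('\0', 1)
    elif rtype in 'FD':
        bits = record.split('\0')
        state[bits[0]] = bits[1:]                   # path -> [state, ...]

assert state == {'a.txt': ['u'], 'b.txt': ['d']}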
@@ -57,9 +84,16 @@
         self._state = {}
         self._local = None
         self._other = None
+        if 'otherctx' in vars(self):
+            del self.otherctx
         if node:
             self._local = node
             self._other = other
+        self._readmergedriver = None
+        if self.mergedriver:
+            self._mdstate = 's'
+        else:
+            self._mdstate = 'u'
         shutil.rmtree(self._repo.join('merge'), True)
         self._dirty = False
 
@@ -72,17 +106,30 @@
         self._state = {}
         self._local = None
         self._other = None
+        if 'otherctx' in vars(self):
+            del self.otherctx
+        self._readmergedriver = None
+        self._mdstate = 's'
         records = self._readrecords()
         for rtype, record in records:
             if rtype == 'L':
                 self._local = bin(record)
             elif rtype == 'O':
                 self._other = bin(record)
-            elif rtype == 'F':
+            elif rtype == 'm':
+                bits = record.split('\0', 1)
+                mdstate = bits[1]
+                if len(mdstate) != 1 or mdstate not in 'ums':
+                    # the merge driver should be idempotent, so just rerun it
+                    mdstate = 'u'
+
+                self._readmergedriver = bits[0]
+                self._mdstate = mdstate
+            elif rtype in 'FD':
                 bits = record.split('\0')
                 self._state[bits[0]] = bits[1:]
             elif not rtype.islower():
-                raise util.Abort(_('unsupported merge state record: %s')
+                raise error.Abort(_('unsupported merge state record: %s')
                                    % rtype)
         self._dirty = False
 
@@ -102,6 +149,25 @@
         returns list of record [(TYPE, data), ...]"""
         v1records = self._readrecordsv1()
         v2records = self._readrecordsv2()
+        if self._v1v2match(v1records, v2records):
+            return v2records
+        else:
+            # v1 file is newer than v2 file, use it
+            # we have to infer the "other" changeset of the merge
+            # we cannot do better than that with v1 of the format
+            mctx = self._repo[None].parents()[-1]
+            v1records.append(('O', mctx.hex()))
+            # add placeholder "other" file node information
+            # nobody is using it yet so we do not need to fetch the data
+            # if mctx was wrong, `mctx[bits[-2]]` may fail.
+            for idx, r in enumerate(v1records):
+                if r[0] == 'F':
+                    bits = r[1].split('\0')
+                    bits.insert(-2, '')
+                    v1records[idx] = (r[0], '\0'.join(bits))
+            return v1records
+
+    def _v1v2match(self, v1records, v2records):
         oldv2 = set() # old format version of v2 record
         for rec in v2records:
             if rec[0] == 'L':
@@ -111,22 +177,9 @@
                 oldv2.add(('F', _droponode(rec[1])))
         for rec in v1records:
             if rec not in oldv2:
-                # v1 file is newer than v2 file, use it
-                # we have to infer the "other" changeset of the merge
-                # we cannot do better than that with v1 of the format
-                mctx = self._repo[None].parents()[-1]
-                v1records.append(('O', mctx.hex()))
-                # add place holder "other" file node information
-                # nobody is using it yet so we do no need to fetch the data
-                # if mctx was wrong `mctx[bits[-2]]` may fails.
-                for idx, r in enumerate(v1records):
-                    if r[0] == 'F':
-                        bits = r[1].split('\0')
-                        bits.insert(-2, '')
-                        v1records[idx] = (r[0], '\0'.join(bits))
-                return v1records
+                return False
         else:
-            return v2records
+            return True
 
     def _readrecordsv1(self):
         """read on disk merge state for version 1 file
@@ -175,6 +228,29 @@
                 raise
         return records
 
+    @util.propertycache
+    def mergedriver(self):
+        # protect against the following:
+        # - A configures a malicious merge driver in their hgrc, then
+        #   pauses the merge
+        # - A edits their hgrc to remove references to the merge driver
+        # - A gives a copy of their entire repo, including .hg, to B
+        # - B inspects .hgrc and finds it to be clean
+        # - B then continues the merge and the malicious merge driver
+        #  gets invoked
+        configmergedriver = self._repo.ui.config('experimental', 'mergedriver')
+        if (self._readmergedriver is not None
+            and self._readmergedriver != configmergedriver):
+            raise error.ConfigError(
+                _("merge driver changed since merge started"),
+                hint=_("revert merge driver change or abort merge"))
+
+        return configmergedriver
+
+    @util.propertycache
+    def otherctx(self):
+        return self._repo[self._other]
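
util.propertycache (relied on above for mergedriver and otherctx) is a compute-once descriptor; the "if 'otherctx' in vars(self): del self.otherctx" dance works because deleting the cached instance attribute re-exposes the descriptor. A rough sketch of the mechanism, not Mercurial's exact implementation:

class propertycache(object):
    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, objtype=None):
        value = self.func(obj)
        obj.__dict__[self.name] = value   # instance attr now shadows descriptor
        return value

class thing(object):
    calls = 0

    @propertycache
    def expensive(self):
        thing.calls += 1
        return 42

t = thing()
assert (t.expensive, thing.calls) == (42, 1)
assert (t.expensive, thing.calls) == (42, 1)   # cached, no recomputation
if 'expensive' in vars(t):
    del t.expensive                            # invalidate the cache
assert (t.expensive, thing.calls) == (42, 2)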
+
     def active(self):
         """Whether mergestate is active.
 
@@ -193,8 +269,14 @@
             records = []
             records.append(('L', hex(self._local)))
             records.append(('O', hex(self._other)))
+            if self.mergedriver:
+                records.append(('m', '\0'.join([
+                    self.mergedriver, self._mdstate])))
             for d, v in self._state.iteritems():
-                records.append(('F', '\0'.join([d] + v)))
+                if v[0] == 'd':
+                    records.append(('D', '\0'.join([d] + v)))
+                else:
+                    records.append(('F', '\0'.join([d] + v)))
             self._writerecords(records)
             self._dirty = False
 
@@ -257,6 +339,9 @@
         self._state[dfile][0] = state
         self._dirty = True
 
+    def mdstate(self):
+        return self._mdstate
+
     def unresolved(self):
         """Obtain the paths of unresolved files."""
 
@@ -264,10 +349,17 @@
             if entry[0] == 'u':
                 yield f
 
-    def resolve(self, dfile, wctx, labels=None):
+    def driverresolved(self):
+        """Obtain the paths of driver-resolved files."""
+
+        for f, entry in self._state.items():
+            if entry[0] == 'd':
+                yield f
+
+    def _resolve(self, preresolve, dfile, wctx, labels=None):
         """rerun merge process for file path `dfile`"""
-        if self[dfile] == 'r':
-            return 0
+        if self[dfile] in 'rd':
+            return True, 0
         stateentry = self._state[dfile]
         state, hash, lfile, afile, anode, ofile, onode, flags = stateentry
         octx = self._repo[self._other]
@@ -279,23 +371,35 @@
         fla = fca.flags()
         if 'x' in flags + flo + fla and 'l' not in flags + flo + fla:
             if fca.node() == nullid:
-                self._repo.ui.warn(_('warning: cannot merge flags for %s\n') %
-                                   afile)
+                if preresolve:
+                    self._repo.ui.warn(
+                        _('warning: cannot merge flags for %s\n') % afile)
             elif flags == fla:
                 flags = flo
-        # restore local
-        f = self._repo.vfs('merge/' + hash)
-        self._repo.wwrite(dfile, f.read(), flags)
-        f.close()
-        r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca,
-                                labels=labels)
+        if preresolve:
+            # restore local
+            f = self._repo.vfs('merge/' + hash)
+            self._repo.wwrite(dfile, f.read(), flags)
+            f.close()
+            complete, r = filemerge.premerge(self._repo, self._local, lfile,
+                                             fcd, fco, fca, labels=labels)
+        else:
+            complete, r = filemerge.filemerge(self._repo, self._local, lfile,
+                                              fcd, fco, fca, labels=labels)
         if r is None:
             # no real conflict
             del self._state[dfile]
             self._dirty = True
         elif not r:
             self.mark(dfile, 'r')
-        return r
+        return complete, r
+
+    def preresolve(self, dfile, wctx, labels=None):
+        return self._resolve(True, dfile, wctx, labels=labels)
+
+    def resolve(self, dfile, wctx, labels=None):
+        """rerun merge process for file path `dfile`"""
+        return self._resolve(False, dfile, wctx, labels=labels)[1]
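
A caller of the new two-phase API might drive it roughly like this (illustrative sketch only, not the actual calling code in Mercurial's resolve command):

def resolveall(ms, paths, wctx, labels=None):
    unresolved = 0
    for f in paths:
        complete, r = ms.preresolve(f, wctx, labels=labels)  # cheap premerge
        if not complete:
            r = ms.resolve(f, wctx, labels=labels)           # full merge pass
        if r:
            unresolved += 1
    ms.commit()                       # persist the updated merge state
    return unresolved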
 
 def _checkunknownfile(repo, wctx, mctx, f, f2=None):
     if f2 is None:
@@ -324,7 +428,7 @@
     for f in sorted(aborts):
         repo.ui.warn(_("%s: untracked file differs\n") % f)
     if aborts:
-        raise util.Abort(_("untracked files in working directory differ "
+        raise error.Abort(_("untracked files in working directory differ "
                            "from files in requested revision"))
 
     for f, (m, args, msg) in actions.iteritems():
@@ -397,10 +501,33 @@
     for f in sorted(pmmf):
         fold = util.normcase(f)
         if fold in foldmap:
-            raise util.Abort(_("case-folding collision between %s and %s")
+            raise error.Abort(_("case-folding collision between %s and %s")
                              % (f, foldmap[fold]))
         foldmap[fold] = f
 
+    # check case-folding of directories
+    foldprefix = unfoldprefix = lastfull = ''
+    for fold, f in sorted(foldmap.items()):
+        if fold.startswith(foldprefix) and not f.startswith(unfoldprefix):
+            # the folded prefix matches but actual casing is different
+            raise error.Abort(_("case-folding collision between "
+                                "%s and directory of %s") % (lastfull, f))
+        foldprefix = fold + '/'
+        unfoldprefix = f + '/'
+        lastfull = f
+
+def driverpreprocess(repo, ms, wctx, labels=None):
+    """run the preprocess step of the merge driver, if any
+
+    This is currently not implemented -- it's an extension point."""
+    return True
+
+def driverconclude(repo, ms, wctx, labels=None):
+    """run the conclude step of the merge driver, if any
+
+    This is currently not implemented -- it's an extension point."""
+    return True
+
 def manifestmerge(repo, wctx, p2, pa, branchmerge, force, partial,
                   acceptremote, followcopies):
     """
@@ -581,10 +708,14 @@
                 repo, wctx, mctx, ancestor, branchmerge, force, partial,
                 acceptremote, followcopies)
             _checkunknownfiles(repo, wctx, mctx, force, actions)
-            if diverge is None: # and renamedelete is None.
-                # Arbitrarily pick warnings from first iteration
+
+            # Track the shortest set of warnings on the theory that bid
+            # merge will correctly incorporate more information
+            if diverge is None or len(diverge1) < len(diverge):
                 diverge = diverge1
+            if renamedelete is None or len(renamedelete1) < len(renamedelete):
                 renamedelete = renamedelete1
+
             for f, a in sorted(actions.iteritems()):
                 m, args, msg = a
                 repo.ui.debug(' %s: %s -> %s\n' % (f, msg, m))
@@ -779,25 +910,6 @@
         repo.ui.debug(" %s: %s -> k\n" % (f, msg))
         # no progress
 
-    # merge
-    for f, args, msg in actions['m']:
-        repo.ui.debug(" %s: %s -> m\n" % (f, msg))
-        z += 1
-        progress(_updating, z, item=f, total=numupdates, unit=_files)
-        if f == '.hgsubstate': # subrepo states need updating
-            subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
-                             overwrite)
-            continue
-        audit(f)
-        r = ms.resolve(f, wctx, labels=labels)
-        if r is not None and r > 0:
-            unresolved += 1
-        else:
-            if r is None:
-                updated += 1
-            else:
-                merged += 1
-
     # directory rename, move local
     for f, args, msg in actions['dm']:
         repo.ui.debug(" %s: %s -> dm\n" % (f, msg))
@@ -830,7 +942,75 @@
         util.setflags(repo.wjoin(f), 'l' in flags, 'x' in flags)
         updated += 1
 
+    mergeactions = actions['m']
+    # the ordering is important here -- ms.mergedriver will raise if the merge
+    # driver has changed, and we want to be able to bypass it when overwrite is
+    # True
+    usemergedriver = not overwrite and mergeactions and ms.mergedriver
+
+    if usemergedriver:
+        ms.commit()
+        proceed = driverpreprocess(repo, ms, wctx, labels=labels)
+        # the driver might leave some files unresolved
+        unresolvedf = set(ms.unresolved())
+        if not proceed:
+            # XXX setting unresolved to at least 1 is a hack to make sure we
+            # error out
+            return updated, merged, removed, max(len(unresolvedf), 1)
+        newactions = []
+        for f, args, msg in mergeactions:
+            if f in unresolvedf:
+                newactions.append((f, args, msg))
+        mergeactions = newactions
+
+    # premerge
+    tocomplete = []
+    for f, args, msg in actions['m']:
+        repo.ui.debug(" %s: %s -> m (premerge)\n" % (f, msg))
+        z += 1
+        progress(_updating, z, item=f, total=numupdates, unit=_files)
+        if f == '.hgsubstate': # subrepo states need updating
+            subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
+                             overwrite)
+            continue
+        audit(f)
+        complete, r = ms.preresolve(f, wctx, labels=labels)
+        if complete:
+            if r is not None and r > 0:
+                unresolved += 1
+            else:
+                if r is None:
+                    updated += 1
+                else:
+                    merged += 1
+        else:
+            numupdates += 1
+            tocomplete.append((f, args, msg))
+
+    # merge
+    for f, args, msg in tocomplete:
+        repo.ui.debug(" %s: %s -> m (merge)\n" % (f, msg))
+        z += 1
+        progress(_updating, z, item=f, total=numupdates, unit=_files)
+        r = ms.resolve(f, wctx, labels=labels)
+        if r is not None and r > 0:
+            unresolved += 1
+        else:
+            if r is None:
+                updated += 1
+            else:
+                merged += 1
+
     ms.commit()
+
+    if usemergedriver and not unresolved and ms.mdstate() != 's':
+        if not driverconclude(repo, ms, wctx, labels=labels):
+            # XXX setting unresolved to at least 1 is a hack to make sure we
+            # error out
+            return updated, merged, removed, max(unresolved, 1)
+
+        ms.commit()
+
     progress(_updating, None, total=numupdates, unit=_files)
 
     return updated, merged, removed, unresolved
@@ -969,43 +1149,11 @@
             pas = [repo[ancestor]]
 
         if node is None:
-            # Here is where we should consider bookmarks, divergent bookmarks,
-            # foreground changesets (successors), and tip of current branch;
-            # but currently we are only checking the branch tips.
-            try:
-                node = repo.branchtip(wc.branch())
-            except errormod.RepoLookupError:
-                if wc.branch() == 'default': # no default branch!
-                    node = repo.lookup('tip') # update to tip
-                else:
-                    raise util.Abort(_("branch %s not found") % wc.branch())
-
-            if p1.obsolete() and not p1.children():
-                # allow updating to successors
-                successors = obsolete.successorssets(repo, p1.node())
-
-                # behavior of certain cases is as follows,
-                #
-                # divergent changesets: update to highest rev, similar to what
-                #     is currently done when there are more than one head
-                #     (i.e. 'tip')
-                #
-                # replaced changesets: same as divergent except we know there
-                # is no conflict
-                #
-                # pruned changeset: no update is done; though, we could
-                #     consider updating to the first non-obsolete parent,
-                #     similar to what is current done for 'hg prune'
-
-                if successors:
-                    # flatten the list here handles both divergent (len > 1)
-                    # and the usual case (len = 1)
-                    successors = [n for sub in successors for n in sub]
-
-                    # get the max revision for the given successors set,
-                    # i.e. the 'tip' of a set
-                    node = repo.revs('max(%ln)', successors).first()
-                    pas = [p1]
+            if (repo.ui.configbool('devel', 'all-warnings')
+                    or repo.ui.configbool('devel', 'oldapi')):
+                repo.ui.develwarn('update with no target')
+            rev, _mark, _act = destutil.destupdate(repo)
+            node = repo[rev].node()
 
         overwrite = force and not branchmerge
 
@@ -1021,18 +1169,18 @@
 
         ### check phase
         if not overwrite and len(pl) > 1:
-            raise util.Abort(_("outstanding uncommitted merge"))
+            raise error.Abort(_("outstanding uncommitted merge"))
         if branchmerge:
             if pas == [p2]:
-                raise util.Abort(_("merging with a working directory ancestor"
+                raise error.Abort(_("merging with a working directory ancestor"
                                    " has no effect"))
             elif pas == [p1]:
                 if not mergeancestor and p1.branch() == p2.branch():
-                    raise util.Abort(_("nothing to merge"),
+                    raise error.Abort(_("nothing to merge"),
                                      hint=_("use 'hg update' "
                                             "or check 'hg heads'"))
             if not force and (wc.files() or wc.deleted()):
-                raise util.Abort(_("uncommitted changes"),
+                raise error.Abort(_("uncommitted changes"),
                                  hint=_("use 'hg status' to list changes"))
             for s in sorted(wc.substate):
                 wc.sub(s).bailifchanged()
@@ -1061,11 +1209,11 @@
                         else:
                             hint = _("commit or update --clean to discard"
                                      " changes")
-                        raise util.Abort(msg, hint=hint)
+                        raise error.Abort(msg, hint=hint)
                     else:  # node is none
                         msg = _("not a linear update")
                         hint = _("merge or update --check to force update")
-                        raise util.Abort(msg, hint=hint)
+                        raise error.Abort(msg, hint=hint)
                 else:
                     # Allow jumping branches if clean and specific rev given
                     pas = [p1]
@@ -1158,9 +1306,7 @@
         wlock.release()
 
     if not partial:
-        def updatehook(parent1=xp1, parent2=xp2, error=stats[3]):
-            repo.hook('update', parent1=parent1, parent2=parent2, error=error)
-        repo._afterlock(updatehook)
+        repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
     return stats
 
 def graft(repo, ctx, pctx, labels):
@@ -1191,7 +1337,7 @@
     # drop the second merge parent
     repo.dirstate.beginparentchange()
     repo.setparents(repo['.'].node(), nullid)
-    repo.dirstate.write()
+    repo.dirstate.write(repo.currenttransaction())
     # fix up dirstate for copies and renames
     copies.duplicatecopies(repo, ctx.rev(), pctx.rev())
     repo.dirstate.endparentchange()
--- a/mercurial/minirst.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/minirst.py	Tue Oct 20 15:59:10 2015 -0500
@@ -14,15 +14,20 @@
 are just indented blocks that look like they are nested. This relies
 on the user to keep the right indentation for the blocks.
 
-Remember to update http://mercurial.selenic.com/wiki/HelpStyleGuide
+Remember to update https://mercurial-scm.org/wiki/HelpStyleGuide
 when adding support for new constructs.
 """
 
-import re
-import util, encoding
-from i18n import _
+from __future__ import absolute_import
 
 import cgi
+import re
+
+from .i18n import _
+from . import (
+    encoding,
+    util,
+)
 
 def section(s):
     return "%s\n%s\n\n" % (s, "\"" * encoding.colwidth(s))
@@ -510,7 +515,7 @@
     if block['type'] == 'bullet':
         if block['lines'][0].startswith('| '):
             # Remove bullet for line blocks and add no extra
-            # indention.
+            # indentation.
             block['lines'][0] = block['lines'][0][2:]
         else:
             m = _bulletre.match(block['lines'][0])
@@ -651,13 +656,17 @@
 def format(text, width=80, indent=0, keep=None, style='plain', section=None):
     """Parse and format the text according to width."""
     blocks, pruned = parse(text, indent, keep or [])
+    parents = []
     if section:
         sections = getsections(blocks)
         blocks = []
         i = 0
         while i < len(sections):
             name, nest, b = sections[i]
+            del parents[nest:]
+            parents.append(name)
             if name == section:
+                b[0]['path'] = parents[3:]
                 blocks.extend(b)
 
                 ## Also show all subnested sections
@@ -669,6 +678,14 @@
     if style == 'html':
         text = formathtml(blocks)
     else:
+        if len([b for b in blocks if b['type'] == 'definition']) > 1:
+            i = 0
+            while i < len(blocks):
+                if blocks[i]['type'] == 'definition':
+                    if 'path' in blocks[i]:
+                        blocks[i]['lines'][0] = '"%s"' % '.'.join(
+                            blocks[i]['path'])
+                i += 1
         text = ''.join(formatblock(b, width) for b in blocks)
     if keep is None:
         return text
@@ -705,11 +722,43 @@
                 nest += i
             level = nest.index(i) + 1
             nest = nest[:level]
+            for i in range(1, len(secs) + 1):
+                sec = secs[-i]
+                if sec[1] < level:
+                    break
+                siblings = [a for a in sec[2] if a['type'] == 'definition']
+                if siblings:
+                    siblingindent = siblings[-1]['indent']
+                    indent = b['indent']
+                    if siblingindent < indent:
+                        level += 1
+                        break
+                    elif siblingindent == indent:
+                        level = sec[1]
+                        break
             secs.append((getname(b), level, [b]))
         else:
             if not secs:
                 # add an initial empty section
                 secs = [('', 0, [])]
+            if b['type'] != 'margin':
+                pointer = 1
+                bindent = b['indent']
+                while pointer < len(secs):
+                    section = secs[-pointer][2][0]
+                    if section['type'] != 'margin':
+                        sindent = section['indent']
+                        if len(section['lines']) > 1:
+                            sindent += len(section['lines'][1]) - \
+                              len(section['lines'][1].lstrip(' '))
+                        if bindent >= sindent:
+                            break
+                    pointer += 1
+                if pointer > 1:
+                    blevel = secs[-pointer][1]
+                    if section['type'] != b['type']:
+                        blevel += 1
+                    secs.append(('', blevel, []))
             secs[-1][2].append(b)
     return secs
 
--- a/mercurial/namespaces.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/namespaces.py	Tue Oct 20 15:59:10 2015 -0500
@@ -1,6 +1,10 @@
-from i18n import _
-from mercurial import util
-import templatekw
+from __future__ import absolute_import
+
+from .i18n import _
+from . import (
+    templatekw,
+    util,
+)
 
 def tolist(val):
     """
--- a/mercurial/node.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/node.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,6 +5,8 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+from __future__ import absolute_import
+
 import binascii
 
 nullrev = -1
--- a/mercurial/obsolete.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/obsolete.py	Tue Oct 20 15:59:10 2015 -0500
@@ -67,8 +67,8 @@
 comment associated with each format for details.
 
 """
-import struct
-import util, base85, node, parsers
+import errno, struct
+import util, base85, node, parsers, error
 import phases
 from i18n import _
 
@@ -164,7 +164,7 @@
         # (metadata will be decoded on demand)
         metadata = data[off:off + mdsize]
         if len(metadata) != mdsize:
-            raise util.Abort(_('parsing obsolete marker: metadata is too '
+            raise error.Abort(_('parsing obsolete marker: metadata is too '
                                'short, %d bytes expected, got %d')
                              % (mdsize, len(metadata)))
         off += mdsize
@@ -200,7 +200,7 @@
 def _fm0encodeonemarker(marker):
     pre, sucs, flags, metadata, date, parents = marker
     if flags & usingsha256:
-        raise util.Abort(_('cannot handle sha256 with old obsstore format'))
+        raise error.Abort(_('cannot handle sha256 with old obsstore format'))
     metadata = dict(metadata)
     time, tz = date
     metadata['date'] = '%r %i' % (time, tz)
@@ -414,7 +414,7 @@
     diskversion = _unpack('>B', data[off:off + 1])[0]
     off += 1
     if diskversion not in formats:
-        raise util.Abort(_('parsing obsolete marker: unknown version %r')
+        raise error.Abort(_('parsing obsolete marker: unknown version %r')
                          % diskversion)
     return diskversion, formats[diskversion][0](data, off)
 
@@ -496,7 +496,7 @@
     """
     for mark in markers:
         if node.nullid in mark[1]:
-            raise util.Abort(_('bad obsolescence marker detected: '
+            raise error.Abort(_('bad obsolescence marker detected: '
                                'invalid successors nullid'))
 
 class obsstore(object):
@@ -520,14 +520,9 @@
     def __init__(self, svfs, defaultformat=_fm1version, readonly=False):
         # caches for various obsolescence related cache
         self.caches = {}
-        self._all = []
         self.svfs = svfs
-        data = svfs.tryread('obsstore')
         self._version = defaultformat
         self._readonly = readonly
-        if data:
-            self._version, markers = _readmarkers(data)
-            self._addmarkers(markers)
 
     def __iter__(self):
         return iter(self._all)
@@ -536,8 +531,24 @@
         return len(self._all)
 
     def __nonzero__(self):
+        if not self._cached('_all'):
+            try:
+                return self.svfs.stat('obsstore').st_size > 1
+            except OSError as inst:
+                if inst.errno != errno.ENOENT:
+                    raise
+                # just build an empty _all list if no obsstore exists, which
+                # avoids further stat() syscalls
+                pass
         return bool(self._all)
 
+    @property
+    def readonly(self):
+        """True if marker creation is disabled
+
+        Remove me in the future when obsolete markers are always on."""
+        return self._readonly
+
     def create(self, transaction, prec, succs=(), flag=0, parents=None,
                date=None, metadata=None):
         """obsolete: add a new obsolete marker
@@ -579,8 +590,8 @@
         Take care of filtering duplicate.
         Return the number of new marker."""
         if self._readonly:
-            raise util.Abort('creating obsolete markers is not enabled on this '
-                             'repo')
+            raise error.Abort('creating obsolete markers is not enabled on '
+                              'this repo')
         known = set(self._all)
         new = []
         for m in markers:
@@ -615,6 +626,16 @@
         return self.add(transaction, markers)
 
     @propertycache
+    def _all(self):
+        data = self.svfs.tryread('obsstore')
+        if not data:
+            return []
+        self._version, markers = _readmarkers(data)
+        markers = list(markers)
+        _checkinvalidmarkers(markers)
+        return markers
+
+    @propertycache
     def successors(self):
         successors = {}
         _addsuccessors(successors, self._all)
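The net effect of moving marker parsing into the _all propertycache, together with the stat()-based __nonzero__ above, is that merely truth-testing the store no longer reads it. A small sketch, assuming `repo` carries an obsstore:

    if repo.obsstore:                  # stat('obsstore') while _all is uncached
        markers = list(repo.obsstore)  # iterating is what triggers the parse
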
@@ -841,15 +862,15 @@
 
 
 def successorssets(repo, initialnode, cache=None):
-    """Return all set of successors of initial nodes
+    """Return set of all latest successors of initial nodes
 
-    The successors set of a changeset A are a group of revisions that succeed
+    The successors set of a changeset A is the group of revisions that succeed
     A. It succeeds A as a consistent whole, each revision being only a partial
     replacement. The successors set contains non-obsolete changesets only.
 
     This function returns the full list of successor sets which is why it
     returns a list of tuples and not just a single tuple. Each tuple is a valid
-    successors set. Not that (A,) may be a valid successors set for changeset A
+    successors set. Note that (A,) may be a valid successors set for changeset A
     (see below).
 
     In most cases, a changeset A will have a single element (e.g. the changeset
@@ -865,7 +886,7 @@
 
     If a changeset A is not obsolete, then it will conceptually have no
     successors set. To distinguish this from a pruned changeset, the successor
-    set will only contain itself, i.e. [(A,)].
+    set will contain itself only, i.e. [(A,)].
 
     Finally, successors unknown locally are considered to be pruned (obsoleted
     without any successors).
@@ -873,10 +894,9 @@
     The optional `cache` parameter is a dictionary that may contain precomputed
     successors sets. It is meant to reuse the computation of a previous call to
     `successorssets` when multiple calls are made at the same time. The cache
-    dictionary is updated in place. The caller is responsible for its live
-    spawn. Code that makes multiple calls to `successorssets` *must* use this
-    cache mechanism or suffer terrible performances.
-
+    dictionary is updated in place. The caller is responsible for its life
+    span. Code that makes multiple calls to `successorssets` *must* use this
+    cache mechanism or suffer terrible performance.
     """
 
     succmarkers = repo.obsstore.successors
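As the docstring insists, repeated calls should share one cache dictionary. A minimal usage sketch; the `nodes` iterable is hypothetical:

    from mercurial import obsolete

    cache = {}
    for n in nodes:                              # some iterable of changeset nodes
        for sset in obsolete.successorssets(repo, n, cache):
            pass                                 # each sset is a tuple of successor nodes
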
@@ -1046,16 +1066,6 @@
                 cache[current] = final
     return cache[initialnode]
 
-def _knownrevs(repo, nodes):
-    """yield revision numbers of known nodes passed in parameters
-
-    Unknown revisions are silently ignored."""
-    torev = repo.changelog.nodemap.get
-    for n in nodes:
-        rev = torev(n)
-        if rev is not None:
-            yield rev
-
 # mapping of 'set-name' -> <function to compute this set>
 cachefuncs = {}
 def cachefor(name):
@@ -1214,7 +1224,7 @@
                 localmetadata.update(rel[2])
 
             if not prec.mutable():
-                raise util.Abort("cannot obsolete public changeset: %s"
+                raise error.Abort("cannot obsolete public changeset: %s"
                                  % prec,
                                  hint='see "hg help phases" for details')
             nprec = prec.node()
@@ -1223,7 +1233,7 @@
             if not nsucs:
                 npare = tuple(p.node() for p in prec.parents())
             if nprec in nsucs:
-                raise util.Abort("changeset %s cannot obsolete itself" % prec)
+                raise error.Abort("changeset %s cannot obsolete itself" % prec)
             repo.obsstore.create(tr, nprec, nsucs, flag, parents=npare,
                                  date=date, metadata=localmetadata)
             repo.filteredrevcache.clear()
@@ -1247,7 +1257,7 @@
     # createmarkers must be enabled if other options are enabled
     if ((allowunstableopt in result or exchangeopt in result) and
         not createmarkersopt in result):
-        raise util.Abort(_("'createmarkers' obsolete option must be enabled "
+        raise error.Abort(_("'createmarkers' obsolete option must be enabled "
                            "if other obsolete options are enabled"))
 
     return option in result
--- a/mercurial/parser.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/parser.py	Tue Oct 20 15:59:10 2015 -0500
@@ -16,8 +16,10 @@
 # an action is a tree node name, a tree label, and an optional match
 # __call__(program) parses program into a labeled tree
 
-import error
-from i18n import _
+from __future__ import absolute_import
+
+from .i18n import _
+from . import error
 
 class parser(object):
     def __init__(self, elements, methods=None):
@@ -120,6 +122,13 @@
         args[k] = x[2]
     return args
 
+def unescapestr(s):
+    try:
+        return s.decode("string_escape")
+    except ValueError as e:
+        # mangle Python's exception into our format
+        raise error.ParseError(str(e).lower())
+
 def _prettyformat(tree, leafnodes, level, lines):
     if not isinstance(tree, tuple) or tree[0] in leafnodes:
         lines.append((level, str(tree)))
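unescapestr() simply wraps Python 2's string_escape codec so that a malformed escape surfaces as a ParseError instead of a bare ValueError. A small sketch of both paths:

    from mercurial import parser
    parser.unescapestr(r'a\nb')   # -> 'a\nb' with a real newline
    parser.unescapestr('\\x4')    # raises error.ParseError (ValueError text, lowercased)
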
--- a/mercurial/parsers.c	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/parsers.c	Tue Oct 20 15:59:10 2015 -0500
@@ -253,8 +253,11 @@
 
 			if (normed == NULL)
 				goto quit;
-			if (PyDict_SetItem(file_foldmap, normed, k) == -1)
+			if (PyDict_SetItem(file_foldmap, normed, k) == -1) {
+				Py_DECREF(normed);
 				goto quit;
+			}
+			Py_DECREF(normed);
 		}
 	}
 	return file_foldmap;
@@ -475,14 +478,14 @@
 			      &str, &readlen))
 		goto quit;
 
-	if (readlen < 0)
-		goto quit;
-
 	len = readlen;
 
 	/* read parents */
-	if (len < 40)
+	if (len < 40) {
+		PyErr_SetString(
+			PyExc_ValueError, "too little data for parents");
 		goto quit;
+	}
 
 	parents = Py_BuildValue("s#s#", str, 20, str + 20, 20);
 	if (!parents)
@@ -548,9 +551,9 @@
 	Py_ssize_t nbytes, pos, l;
 	PyObject *k, *v = NULL, *pn;
 	char *p, *s;
-	double now;
+	int now;
 
-	if (!PyArg_ParseTuple(args, "O!O!Od:pack_dirstate",
+	if (!PyArg_ParseTuple(args, "O!O!Oi:pack_dirstate",
 			      &PyDict_Type, &map, &PyDict_Type, &copymap,
 			      &pl, &now))
 		return NULL;
@@ -603,7 +606,7 @@
 	for (pos = 0; PyDict_Next(map, &pos, &k, &v); ) {
 		dirstateTupleObject *tuple;
 		char state;
-		uint32_t mode, size, mtime;
+		int mode, size, mtime;
 		Py_ssize_t len, l;
 		PyObject *o;
 		char *t;
@@ -619,7 +622,7 @@
 		mode = tuple->mode;
 		size = tuple->size;
 		mtime = tuple->mtime;
-		if (state == 'n' && mtime == (uint32_t)now) {
+		if (state == 'n' && mtime == now) {
 			/* See pure/parsers.py:pack_dirstate for why we do
 			 * this. */
 			mtime = -1;
@@ -633,9 +636,9 @@
 			mtime_unset = NULL;
 		}
 		*p++ = state;
-		putbe32(mode, p);
-		putbe32(size, p + 4);
-		putbe32(mtime, p + 8);
+		putbe32((uint32_t)mode, p);
+		putbe32((uint32_t)size, p + 4);
+		putbe32((uint32_t)mtime, p + 8);
 		t = p + 12;
 		p += 16;
 		len = PyString_GET_SIZE(k);
@@ -679,7 +682,7 @@
 } nodetree;
 
 /*
- * This class has two behaviours.
+ * This class has two behaviors.
  *
  * When used in a list-like way (with integer keys), we decode an
  * entry in a RevlogNG index file on demand. Our last entry is a
@@ -702,8 +705,8 @@
 	PyObject *headrevs;    /* cache, invalidated on changes */
 	PyObject *filteredrevs;/* filtered revs set */
 	nodetree *nt;          /* base-16 trie */
-	int ntlength;          /* # nodes in use */
-	int ntcapacity;        /* # nodes allocated */
+	unsigned ntlength;          /* # nodes in use */
+	unsigned ntcapacity;        /* # nodes allocated */
 	int ntdepth;           /* maximum depth of tree */
 	int ntsplits;          /* # splits performed */
 	int ntrev;             /* last rev scanned */
@@ -1043,13 +1046,12 @@
 	return newlist;
 }
 
-/* arg should be Py_ssize_t but Python 2.4 do not support the n format */
-static int check_filter(PyObject *filter, unsigned long arg) {
+static int check_filter(PyObject *filter, Py_ssize_t arg) {
 	if (filter) {
 		PyObject *arglist, *result;
 		int isfiltered;
 
-		arglist = Py_BuildValue("(k)", arg);
+		arglist = Py_BuildValue("(n)", arg);
 		if (!arglist) {
 			return -1;
 		}
@@ -1105,6 +1107,162 @@
 		phases[i] = phases[parent_2];
 }
 
+static PyObject *reachableroots2(indexObject *self, PyObject *args)
+{
+
+	/* Input */
+	long minroot;
+	PyObject *includepatharg = NULL;
+	int includepath = 0;
+	/* heads and roots are lists */
+	PyObject *heads = NULL;
+	PyObject *roots = NULL;
+	PyObject *reachable = NULL;
+
+	PyObject *val;
+	Py_ssize_t len = index_length(self) - 1;
+	long revnum;
+	Py_ssize_t k;
+	Py_ssize_t i;
+	Py_ssize_t l;
+	int r;
+	int parents[2];
+
+	/* Internal data structure:
+	 * tovisit: array of length len+1 (all revs + nullrev), filled upto lentovisit
+	 * tovisit: array of length len+1 (all revs + nullrev), filled up to lentovisit
+	int *tovisit = NULL;
+	long lentovisit = 0;
+	enum { RS_SEEN = 1, RS_ROOT = 2, RS_REACHABLE = 4 };
+	char *revstates = NULL;
+
+	/* Get arguments */
+	if (!PyArg_ParseTuple(args, "lO!O!O!", &minroot, &PyList_Type, &heads,
+			      &PyList_Type, &roots,
+			      &PyBool_Type, &includepatharg))
+		goto bail;
+
+	if (includepatharg == Py_True)
+		includepath = 1;
+
+	/* Initialize return set */
+	reachable = PyList_New(0);
+	if (reachable == NULL)
+		goto bail;
+
+	/* Initialize internal datastructures */
+	tovisit = (int *)malloc((len + 1) * sizeof(int));
+	if (tovisit == NULL) {
+		PyErr_NoMemory();
+		goto bail;
+	}
+
+	revstates = (char *)calloc(len + 1, 1);
+	if (revstates == NULL) {
+		PyErr_NoMemory();
+		goto bail;
+	}
+
+	l = PyList_GET_SIZE(roots);
+	for (i = 0; i < l; i++) {
+		revnum = PyInt_AsLong(PyList_GET_ITEM(roots, i));
+		if (revnum == -1 && PyErr_Occurred())
+			goto bail;
+		/* If root is out of range, e.g. wdir(), it must be unreachable
+		 * from heads. So we can just ignore it. */
+		if (revnum + 1 < 0 || revnum + 1 >= len + 1)
+			continue;
+		revstates[revnum + 1] |= RS_ROOT;
+	}
+
+	/* Populate tovisit with all the heads */
+	l = PyList_GET_SIZE(heads);
+	for (i = 0; i < l; i++) {
+		revnum = PyInt_AsLong(PyList_GET_ITEM(heads, i));
+		if (revnum == -1 && PyErr_Occurred())
+			goto bail;
+		if (revnum + 1 < 0 || revnum + 1 >= len + 1) {
+			PyErr_SetString(PyExc_IndexError, "head out of range");
+			goto bail;
+		}
+		if (!(revstates[revnum + 1] & RS_SEEN)) {
+			tovisit[lentovisit++] = (int)revnum;
+			revstates[revnum + 1] |= RS_SEEN;
+		}
+	}
+
+	/* Visit the tovisit list and find the reachable roots */
+	k = 0;
+	while (k < lentovisit) {
+		/* Add the node to reachable if it is a root */
+		revnum = tovisit[k++];
+		if (revstates[revnum + 1] & RS_ROOT) {
+			revstates[revnum + 1] |= RS_REACHABLE;
+			val = PyInt_FromLong(revnum);
+			if (val == NULL)
+				goto bail;
+			r = PyList_Append(reachable, val);
+			Py_DECREF(val);
+			if (r < 0)
+				goto bail;
+			if (includepath == 0)
+				continue;
+		}
+
+		/* Add its parents to the list of nodes to visit */
+		if (revnum == -1)
+			continue;
+		r = index_get_parents(self, revnum, parents, (int)len - 1);
+		if (r < 0)
+			goto bail;
+		for (i = 0; i < 2; i++) {
+			if (!(revstates[parents[i] + 1] & RS_SEEN)
+			    && parents[i] >= minroot) {
+				tovisit[lentovisit++] = parents[i];
+				revstates[parents[i] + 1] |= RS_SEEN;
+			}
+		}
+	}
+
+	/* Find all the nodes in between the roots we found and the heads
+	 * and add them to the reachable set */
+	if (includepath == 1) {
+		long minidx = minroot;
+		if (minidx < 0)
+			minidx = 0;
+		for (i = minidx; i < len; i++) {
+			if (!(revstates[i + 1] & RS_SEEN))
+				continue;
+			r = index_get_parents(self, i, parents, (int)len - 1);
+			/* Corrupted index file, error is set from
+			 * index_get_parents */
+			if (r < 0)
+				goto bail;
+			if (((revstates[parents[0] + 1] |
+			      revstates[parents[1] + 1]) & RS_REACHABLE)
+			    && !(revstates[i + 1] & RS_REACHABLE)) {
+				revstates[i + 1] |= RS_REACHABLE;
+				val = PyInt_FromLong(i);
+				if (val == NULL)
+					goto bail;
+				r = PyList_Append(reachable, val);
+				Py_DECREF(val);
+				if (r < 0)
+					goto bail;
+			}
+		}
+	}
+
+	free(revstates);
+	free(tovisit);
+	return reachable;
+bail:
+	Py_XDECREF(reachable);
+	free(revstates);
+	free(tovisit);
+	return NULL;
+}
+
 static PyObject *compute_phases_map_sets(indexObject *self, PyObject *args)
 {
 	PyObject *roots = Py_None;
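For readers less fluent in the index C code, the reachableroots2 walk added above boils down to a bounded breadth-first search from the heads toward minroot, collecting the roots it meets; the includepath pass then sweeps revisions in increasing order to pull in everything between those roots and the heads. A loose Python rendering of the core walk (includepath and nullrev bookkeeping simplified; `cl.parentrevs()` is the assumed changelog API):

    def reachableroots(cl, minroot, heads, roots):
        roots = set(roots)
        reachable = []
        seen = set(heads)
        tovisit = list(heads)
        while tovisit:
            rev = tovisit.pop(0)
            if rev in roots:
                reachable.append(rev)
                continue                  # includepath=False: stop at roots
            for p in cl.parentrevs(rev):
                if p >= 0 and p >= minroot and p not in seen:
                    seen.add(p)
                    tovisit.append(p)
        return reachable
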
@@ -1419,7 +1577,7 @@
 static int nt_init(indexObject *self)
 {
 	if (self->nt == NULL) {
-		if (self->raw_length > INT_MAX / sizeof(nodetree)) {
+		if ((size_t)self->raw_length > INT_MAX / sizeof(nodetree)) {
 			PyErr_SetString(PyExc_ValueError, "overflow in nt_init");
 			return -1;
 		}
@@ -2011,16 +2169,18 @@
  */
 static PyObject *index_ancestors(indexObject *self, PyObject *args)
 {
+	PyObject *ret;
 	PyObject *gca = index_commonancestorsheads(self, args);
 	if (gca == NULL)
 		return NULL;
 
 	if (PyList_GET_SIZE(gca) <= 1) {
-		Py_INCREF(gca);
 		return gca;
 	}
 
-	return find_deepest(self, gca);
+	ret = find_deepest(self, gca);
+	Py_DECREF(gca);
+	return ret;
 }
 
 /*
@@ -2282,6 +2442,8 @@
 	 "get an index entry"},
 	{"computephasesmapsets", (PyCFunction)compute_phases_map_sets,
 			METH_VARARGS, "compute phases"},
+	{"reachableroots2", (PyCFunction)reachableroots2, METH_VARARGS,
+		"reachableroots"},
 	{"headrevs", (PyCFunction)index_headrevs, METH_VARARGS,
 	 "get head revisions"}, /* Can do filtering since 3.2 */
 	{"headrevsfiltered", (PyCFunction)index_headrevs, METH_VARARGS,
@@ -2387,6 +2549,7 @@
 
 #define BUMPED_FIX 1
 #define USING_SHA_256 2
+#define FM1_HEADER_SIZE (4 + 8 + 2 + 2 + 1 + 1 + 1)
 
 static PyObject *readshas(
 	const char *source, unsigned char num, Py_ssize_t hashwidth)
@@ -2402,14 +2565,16 @@
 			Py_DECREF(list);
 			return NULL;
 		}
-		PyTuple_SetItem(list, i, hash);
+		PyTuple_SET_ITEM(list, i, hash);
 		source += hashwidth;
 	}
 	return list;
 }
 
-static PyObject *fm1readmarker(const char *data, uint32_t *msize)
+static PyObject *fm1readmarker(const char *databegin, const char *dataend,
+			       uint32_t *msize)
 {
+	const char *data = databegin;
 	const char *meta;
 
 	double mtime;
@@ -2422,6 +2587,10 @@
 	PyObject *metadata = NULL, *ret = NULL;
 	int i;
 
+	if (data + FM1_HEADER_SIZE > dataend) {
+		goto overflow;
+	}
+
 	*msize = getbe32(data);
 	data += 4;
 	mtime = getbefloat64(data);
@@ -2439,12 +2608,23 @@
 	nparents = (unsigned char)(*data++);
 	nmetadata = (unsigned char)(*data++);
 
+	if (databegin + *msize > dataend) {
+		goto overflow;
+	}
+	dataend = databegin + *msize;  /* narrow down to marker size */
+
+	if (data + hashwidth > dataend) {
+		goto overflow;
+	}
 	prec = PyString_FromStringAndSize(data, hashwidth);
 	data += hashwidth;
 	if (prec == NULL) {
 		goto bail;
 	}
 
+	if (data + nsuccs * hashwidth > dataend) {
+		goto overflow;
+	}
 	succs = readshas(data, nsuccs, hashwidth);
 	if (succs == NULL) {
 		goto bail;
@@ -2452,6 +2632,9 @@
 	data += nsuccs * hashwidth;
 
 	if (nparents == 1 || nparents == 2) {
+		if (data + nparents * hashwidth > dataend) {
+			goto overflow;
+		}
 		parents = readshas(data, nparents, hashwidth);
 		if (parents == NULL) {
 			goto bail;
@@ -2461,6 +2644,9 @@
 		parents = Py_None;
 	}
 
+	if (data + 2 * nmetadata > dataend) {
+		goto overflow;
+	}
 	meta = data + (2 * nmetadata);
 	metadata = PyTuple_New(nmetadata);
 	if (metadata == NULL) {
@@ -2468,27 +2654,32 @@
 	}
 	for (i = 0; i < nmetadata; i++) {
 		PyObject *tmp, *left = NULL, *right = NULL;
-		Py_ssize_t metasize = (unsigned char)(*data++);
-		left = PyString_FromStringAndSize(meta, metasize);
-		meta += metasize;
-		metasize = (unsigned char)(*data++);
-		right = PyString_FromStringAndSize(meta, metasize);
-		meta += metasize;
-		if (!left || !right) {
+		Py_ssize_t leftsize = (unsigned char)(*data++);
+		Py_ssize_t rightsize = (unsigned char)(*data++);
+		if (meta + leftsize + rightsize > dataend) {
+			goto overflow;
+		}
+		left = PyString_FromStringAndSize(meta, leftsize);
+		meta += leftsize;
+		right = PyString_FromStringAndSize(meta, rightsize);
+		meta += rightsize;
+		tmp = PyTuple_New(2);
+		if (!left || !right || !tmp) {
 			Py_XDECREF(left);
 			Py_XDECREF(right);
+			Py_XDECREF(tmp);
 			goto bail;
 		}
-		tmp = PyTuple_Pack(2, left, right);
-		Py_DECREF(left);
-		Py_DECREF(right);
-		if (!tmp) {
-			goto bail;
-		}
-		PyTuple_SetItem(metadata, i, tmp);
+		PyTuple_SET_ITEM(tmp, 0, left);
+		PyTuple_SET_ITEM(tmp, 1, right);
+		PyTuple_SET_ITEM(metadata, i, tmp);
 	}
 	ret = Py_BuildValue("(OOHO(di)O)", prec, succs, flags,
 			    metadata, mtime, (int)tz * 60, parents);
+	goto bail;  /* return successfully */
+
+overflow:
+	PyErr_SetString(PyExc_ValueError, "overflow in obsstore");
 bail:
 	Py_XDECREF(prec);
 	Py_XDECREF(succs);
@@ -2500,16 +2691,15 @@
 
 
 static PyObject *fm1readmarkers(PyObject *self, PyObject *args) {
-	const char *data;
+	const char *data, *dataend;
 	Py_ssize_t datalen;
-	/* only unsigned long because python 2.4, should be Py_ssize_t */
-	unsigned long offset, stop;
+	Py_ssize_t offset, stop;
 	PyObject *markers = NULL;
 
-	/* replace kk with nn when we drop Python 2.4 */
-	if (!PyArg_ParseTuple(args, "s#kk", &data, &datalen, &offset, &stop)) {
+	if (!PyArg_ParseTuple(args, "s#nn", &data, &datalen, &offset, &stop)) {
 		return NULL;
 	}
+	dataend = data + datalen;
 	data += offset;
 	markers = PyList_New(0);
 	if (!markers) {
@@ -2518,7 +2708,7 @@
 	while (offset < stop) {
 		uint32_t msize;
 		int error;
-		PyObject *record = fm1readmarker(data, &msize);
+		PyObject *record = fm1readmarker(data, dataend, &msize);
 		if (!record) {
 			goto bail;
 		}
--- a/mercurial/patch.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/patch.py	Tue Oct 20 15:59:10 2015 -0500
@@ -151,13 +151,28 @@
     # if we are here, we have a very plain patch
     return remainder(cur)
 
+## Some facility for extensible patch parsing:
+# list of pairs ("header to match", "data key")
+patchheadermap = [('Date', 'date'),
+                  ('Branch', 'branch'),
+                  ('Node ID', 'nodeid'),
+                 ]
+
 def extract(ui, fileobj):
     '''extract patch from data read from fileobj.
 
     patch can be a normal patch or contained in an email message.
 
-    return tuple (filename, message, user, date, branch, node, p1, p2).
-    Any item in the returned tuple can be None. If filename is None,
+    return a dictionary. Standard keys are:
+      - filename,
+      - message,
+      - user,
+      - date,
+      - branch,
+      - node,
+      - p1,
+      - p2.
+    Any item can be missing from the dictionary. If filename is missing,
     fileobj did not contain a patch. Caller must unlink filename when done.'''
 
     # attempt to detect the start of a patch
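The parsing loop below consults patchheadermap for any '# <Header> ' line inside an hg patch header, so third-party code can register extra fields without touching extract() itself. A sketch, with a hypothetical header name:

    from mercurial import patch
    # map '# Fancy-Field value' lines to data['fancy'] (hypothetical field)
    patch.patchheadermap.append(('Fancy-Field', 'fancy'))
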
@@ -167,21 +182,19 @@
                         r'---[ \t].*?^\+\+\+[ \t]|'
                         r'\*\*\*[ \t].*?^---[ \t])', re.MULTILINE|re.DOTALL)
 
+    data = {}
     fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
     tmpfp = os.fdopen(fd, 'w')
     try:
         msg = email.Parser.Parser().parse(fileobj)
 
         subject = msg['Subject']
-        user = msg['From']
-        if not subject and not user:
+        data['user'] = msg['From']
+        if not subject and not data['user']:
             # Not an email, restore parsed headers if any
             subject = '\n'.join(': '.join(h) for h in msg.items()) + '\n'
 
         # should try to parse msg['Date']
-        date = None
-        nodeid = None
-        branch = None
         parents = []
 
         if subject:
@@ -191,8 +204,8 @@
                     subject = subject[pend + 1:].lstrip()
             subject = re.sub(r'\n[ \t]+', ' ', subject)
             ui.debug('Subject: %s\n' % subject)
-        if user:
-            ui.debug('From: %s\n' % user)
+        if data['user']:
+            ui.debug('From: %s\n' % data['user'])
         diffs_seen = 0
         ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
         message = ''
@@ -222,17 +235,16 @@
                         subject = None
                     elif hgpatchheader:
                         if line.startswith('# User '):
-                            user = line[7:]
-                            ui.debug('From: %s\n' % user)
-                        elif line.startswith("# Date "):
-                            date = line[7:]
-                        elif line.startswith("# Branch "):
-                            branch = line[9:]
-                        elif line.startswith("# Node ID "):
-                            nodeid = line[10:]
+                            data['user'] = line[7:]
+                            ui.debug('From: %s\n' % data['user'])
                         elif line.startswith("# Parent "):
                             parents.append(line[9:].lstrip())
-                        elif not line.startswith("# "):
+                        elif line.startswith("# "):
+                            for header, key in patchheadermap:
+                                prefix = '# %s ' % header
+                                if line.startswith(prefix):
+                                    data[key] = line[len(prefix):]
+                        else:
                             hgpatchheader = False
                     elif line == '---':
                         ignoretext = True
@@ -253,22 +265,18 @@
 
     if subject and not message.startswith(subject):
         message = '%s\n%s' % (subject, message)
+    data['message'] = message
     tmpfp.close()
-    if not diffs_seen:
-        os.unlink(tmpname)
-        return None, message, user, date, branch, None, None, None
+    if parents:
+        data['p1'] = parents.pop(0)
+        if parents:
+            data['p2'] = parents.pop(0)
 
-    if parents:
-        p1 = parents.pop(0)
+    if diffs_seen:
+        data['filename'] = tmpname
     else:
-        p1 = None
-
-    if parents:
-        p2 = parents.pop(0)
-    else:
-        p2 = None
-
-    return tmpname, message, user, date, branch, nodeid, p1, p2
+        os.unlink(tmpname)
+    return data
 
 class patchmeta(object):
     """Patched file metadata
@@ -995,7 +1003,7 @@
                     ui.write("\n")
                     continue
                 # Patch comment based on the Git one (based on comment at end of
-                # http://mercurial.selenic.com/wiki/RecordExtension)
+                # https://mercurial-scm.org/wiki/RecordExtension)
                 phelp = '---' + _("""
 To remove '-' lines, make them ' ' lines (context).
 To remove '+' lines, delete them.
@@ -1048,7 +1056,7 @@
             elif r == 6: # all
                 ret = skipall = True
             elif r == 7: # quit
-                raise util.Abort(_('user quit'))
+                raise error.Abort(_('user quit'))
             return ret, skipfile, skipall, newpatches
 
     seen = set()
@@ -1956,7 +1964,7 @@
                 else:
                     store.setfile(path, data, mode)
         else:
-            raise util.Abort(_('unsupported parser state: %s') % state)
+            raise error.Abort(_('unsupported parser state: %s') % state)
 
     if current_file:
         rejects += current_file.close()
@@ -2014,7 +2022,7 @@
     if eolmode is None:
         eolmode = ui.config('patch', 'eol', 'strict')
     if eolmode.lower() not in eolmodes:
-        raise util.Abort(_('unsupported line endings type: %s') % eolmode)
+        raise error.Abort(_('unsupported line endings type: %s') % eolmode)
     eolmode = eolmode.lower()
 
     store = filestore()
@@ -2087,7 +2095,7 @@
                 if gp.op == 'RENAME':
                     changed.add(gp.oldpath)
             elif state not in ('hunk', 'git'):
-                raise util.Abort(_('unsupported parser state: %s') % state)
+                raise error.Abort(_('unsupported parser state: %s') % state)
         return changed
     finally:
         fp.close()
--- a/mercurial/pathencode.c	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/pathencode.c	Tue Oct 20 15:59:10 2015 -0500
@@ -684,6 +684,8 @@
 
 	hashobj = PyObject_CallMethod(shaobj, "digest", "");
 	Py_DECREF(shaobj);
+	if (hashobj == NULL)
+		return -1;
 
 	if (!PyString_Check(hashobj) || PyString_GET_SIZE(hashobj) != 20) {
 		PyErr_SetString(PyExc_TypeError,
--- a/mercurial/pathutil.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/pathutil.py	Tue Oct 20 15:59:10 2015 -0500
@@ -1,8 +1,16 @@
-import os, errno, stat, posixpath
+from __future__ import absolute_import
+
+import errno
+import os
+import posixpath
+import stat
 
-import encoding
-import util
-from i18n import _
+from .i18n import _
+from . import (
+    encoding,
+    error,
+    util,
+)
 
 def _lowerclean(s):
     return encoding.hfsignoreclean(s.lower())
@@ -40,18 +48,18 @@
             return
         # AIX ignores "/" at end of path, others raise EISDIR.
         if util.endswithsep(path):
-            raise util.Abort(_("path ends in directory separator: %s") % path)
+            raise error.Abort(_("path ends in directory separator: %s") % path)
         parts = util.splitpath(path)
         if (os.path.splitdrive(path)[0]
             or _lowerclean(parts[0]) in ('.hg', '.hg.', '')
             or os.pardir in parts):
-            raise util.Abort(_("path contains illegal component: %s") % path)
+            raise error.Abort(_("path contains illegal component: %s") % path)
         # Windows shortname aliases
         for p in parts:
             if "~" in p:
                 first, last = p.split("~", 1)
                 if last.isdigit() and first.upper() in ["HG", "HG8B6C"]:
-                    raise util.Abort(_("path contains illegal component: %s")
+                    raise error.Abort(_("path contains illegal component: %s")
                                      % path)
         if '.hg' in _lowerclean(path):
             lparts = [_lowerclean(p.lower()) for p in parts]
@@ -59,7 +67,7 @@
                 if p in lparts[1:]:
                     pos = lparts.index(p)
                     base = os.path.join(*parts[:pos])
-                    raise util.Abort(_("path '%s' is inside nested repo %r")
+                    raise error.Abort(_("path '%s' is inside nested repo %r")
                                      % (path, base))
 
         normparts = util.splitpath(normpath)
@@ -83,13 +91,13 @@
                     raise
             else:
                 if stat.S_ISLNK(st.st_mode):
-                    raise util.Abort(
+                    raise error.Abort(
                         _('path %r traverses symbolic link %r')
                         % (path, prefix))
                 elif (stat.S_ISDIR(st.st_mode) and
                       os.path.isdir(os.path.join(curpath, '.hg'))):
                     if not self.callback or not self.callback(curpath):
-                        raise util.Abort(_("path '%s' is inside nested "
+                        raise error.Abort(_("path '%s' is inside nested "
                                            "repo %r")
                                          % (path, prefix))
             prefixes.append(normprefix)
@@ -105,7 +113,7 @@
         try:
             self(path)
             return True
-        except (OSError, util.Abort):
+        except (OSError, error.Abort):
             return False
 
 def canonpath(root, cwd, myname, auditor=None):
@@ -160,10 +168,10 @@
                 canonpath(root, root, myname, auditor)
                 hint = (_("consider using '--cwd %s'")
                         % os.path.relpath(root, cwd))
-        except util.Abort:
+        except error.Abort:
             pass
 
-        raise util.Abort(_("%s not under root '%s'") % (myname, root),
+        raise error.Abort(_("%s not under root '%s'") % (myname, root),
                          hint=hint)
 
 def normasprefix(path):
--- a/mercurial/peer.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/peer.py	Tue Oct 20 15:59:10 2015 -0500
@@ -6,11 +6,91 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from i18n import _
-import error
+from __future__ import absolute_import
+
+from .i18n import _
+from . import (
+    error,
+    util,
+)
+
+# abstract batching support
+
+class future(object):
+    '''placeholder for a value to be set later'''
+    def set(self, value):
+        if util.safehasattr(self, 'value'):
+            raise error.RepoError("future is already set")
+        self.value = value
+
+class batcher(object):
+    '''base class for batches of commands submittable in a single request
+
+    All methods invoked on instances of this class are simply queued and
+    return a future for the result. Once you call submit(), all the queued
+    calls are performed and the results set in their respective futures.
+    '''
+    def __init__(self):
+        self.calls = []
+    def __getattr__(self, name):
+        def call(*args, **opts):
+            resref = future()
+            self.calls.append((name, args, opts, resref,))
+            return resref
+        return call
+    def submit(self):
+        pass
+
+class localbatch(batcher):
+    '''performs the queued calls directly'''
+    def __init__(self, local):
+        batcher.__init__(self)
+        self.local = local
+    def submit(self):
+        for name, args, opts, resref in self.calls:
+            resref.set(getattr(self.local, name)(*args, **opts))
+
+def batchable(f):
+    '''annotation for batchable methods
+
+    Such methods must implement a coroutine as follows:
+
+    @batchable
+    def sample(self, one, two=None):
+        # Handle locally computable results first:
+        if not one:
+            yield "a local result", None
+        # Build list of encoded arguments suitable for your wire protocol:
+        encargs = [('one', encode(one),), ('two', encode(two),)]
+        # Create future for injection of encoded result:
+        encresref = future()
+        # Return encoded arguments and future:
+        yield encargs, encresref
+        # Assuming the future to be filled with the result from the batched
+        # request now. Decode it:
+        yield decode(encresref.value)
+
+    The decorator returns a function which wraps this coroutine as a plain
+    method, but adds the original method as an attribute called "batchable",
+    which is used by remotebatch to split the call into separate encoding and
+    decoding phases.
+    '''
+    def plain(*args, **opts):
+        batchable = f(*args, **opts)
+        encargsorres, encresref = batchable.next()
+        if not encresref:
+            return encargsorres # a local result in this case
+        self = args[0]
+        encresref.set(self._submitone(f.func_name, encargsorres))
+        return batchable.next()
+    setattr(plain, 'batchable', f)
+    return plain
 
 class peerrepository(object):
 
+    def batch(self):
+        return localbatch(self)
+
     def capable(self, name):
         '''tell whether repo supports named capability.
         return False if not supported.
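peerrepository.batch() now gives every peer at least a localbatch; wire peers substitute a remote batcher elsewhere. A usage sketch (the queued commands are only examples):

    b = peer.batch()
    fheads = b.heads()                 # each call returns a future immediately
    fcaps = b.capable('branchmap')
    b.submit()                         # localbatch: runs the calls right away
    heads, caps = fheads.value, fcaps.value
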
--- a/mercurial/phases.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/phases.py	Tue Oct 20 15:59:10 2015 -0500
@@ -100,11 +100,22 @@
 
 """
 
-import os
+from __future__ import absolute_import
+
 import errno
-from node import nullid, nullrev, bin, hex, short
-from i18n import _
-import util, error
+import os
+
+from .i18n import _
+from .node import (
+    bin,
+    hex,
+    nullid,
+    nullrev,
+    short,
+)
+from . import (
+    error,
+)
 
 allphases = public, draft, secret = range(3)
 trackedphases = allphases[1:]
@@ -294,7 +305,7 @@
                     if self.phase(repo, repo[n].rev()) < targetphase]
         if newroots:
             if nullid in newroots:
-                raise util.Abort(_('cannot change null revision phase'))
+                raise error.Abort(_('cannot change null revision phase'))
             currentroots = currentroots.copy()
             currentroots.update(newroots)
             ctxs = repo.set('roots(%ln::)', currentroots)
--- a/mercurial/posix.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/posix.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,11 +5,26 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from i18n import _
-import encoding
-import os, sys, errno, stat, getpass, pwd, grp, socket, tempfile, unicodedata
+from __future__ import absolute_import
+
+import errno
+import fcntl
+import getpass
+import grp
+import os
+import pwd
+import re
 import select
-import fcntl, re
+import socket
+import stat
+import sys
+import tempfile
+import unicodedata
+
+from .i18n import _
+from . import (
+    encoding,
+)
 
 posixfile = open
 normpath = os.path.normpath
@@ -335,7 +350,7 @@
         return '"%s"' % s
     global _needsshellquote
     if _needsshellquote is None:
-        _needsshellquote = re.compile(r'[^a-zA-Z0-9._/-]').search
+        _needsshellquote = re.compile(r'[^a-zA-Z0-9._/+-]').search
     if s and not _needsshellquote(s):
         # "s" shouldn't have to be quoted
         return s
@@ -459,7 +474,8 @@
 
 def termwidth():
     try:
-        import termios, array
+        import array
+        import termios
         for dev in (sys.stderr, sys.stdout, sys.stdin):
             try:
                 try:
--- a/mercurial/progress.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/progress.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,13 +5,14 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+from __future__ import absolute_import
+
 import sys
-import time
 import threading
-from mercurial import encoding
+import time
 
-from mercurial.i18n import _
-
+from .i18n import _
+from . import encoding
 
 def spacejoin(*args):
     return ' '.join(s for s in args if s)
@@ -21,7 +22,7 @@
         ui._isatty(sys.stderr) or ui.configbool('progress', 'assume-tty'))
 
 def fmtremaining(seconds):
-    """format a number of remaining seconds in humain readable way
+    """format a number of remaining seconds in human readable way
 
     This will properly display seconds, minutes, hours, days if needed"""
     if seconds < 60:
@@ -165,6 +166,9 @@
         if not shouldprint(self.ui):
             return
         sys.stderr.write('\r%s\r' % (' ' * self.width()))
+        if self.printed:
+            # force immediate re-paint of progress bar
+            self.lastprint = 0
 
     def complete(self):
         if not shouldprint(self.ui):
--- a/mercurial/pushkey.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/pushkey.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,7 +5,14 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import bookmarks, phases, obsolete, encoding
+from __future__ import absolute_import
+
+from . import (
+    bookmarks,
+    encoding,
+    obsolete,
+    phases,
+)
 
 def _nslist(repo):
     n = {}
--- a/mercurial/repair.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/repair.py	Tue Oct 20 15:59:10 2015 -0500
@@ -6,25 +6,25 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from mercurial import changegroup, exchange, util, bundle2
-from mercurial.node import short
-from mercurial.i18n import _
+from __future__ import absolute_import
+
 import errno
 
+from .i18n import _
+from .node import short
+from . import (
+    bundle2,
+    changegroup,
+    error,
+    exchange,
+    util,
+)
+
 def _bundle(repo, bases, heads, node, suffix, compress=True):
     """create a bundle with the specified revisions as a backup"""
-    usebundle2 = (repo.ui.configbool('experimental', 'bundle2-exp', True) and
-                  repo.ui.config('experimental', 'strip-bundle2-version'))
-    if usebundle2:
-        cgversion = repo.ui.config('experimental', 'strip-bundle2-version')
-        if cgversion not in changegroup.packermap:
-            repo.ui.warn(_('unknown strip-bundle2-version value %r; '
-                            'should be one of %r\n') %
-                         (cgversion, sorted(changegroup.packermap.keys()),))
-            cgversion = '01'
-            usebundle2 = False
-    else:
-        cgversion = '01'
+    cgversion = '01'
+    if 'generaldelta' in repo.requirements:
+        cgversion = '02'
 
     cg = changegroup.changegroupsubset(repo, bases, heads, 'strip',
                                        version=cgversion)
@@ -39,13 +39,17 @@
     totalhash = util.sha1(''.join(allhashes)).hexdigest()
     name = "%s/%s-%s-%s.hg" % (backupdir, short(node), totalhash[:8], suffix)
 
-    if usebundle2:
+    comp = None
+    if cgversion != '01':
         bundletype = "HG20"
+        if compress:
+            comp = 'BZ'
     elif compress:
         bundletype = "HG10BZ"
     else:
         bundletype = "HG10UN"
-    return changegroup.writebundle(repo.ui, cg, name, bundletype, vfs)
+    return changegroup.writebundle(repo.ui, cg, name, bundletype, vfs,
+                                   compression=comp)
 
 def _collectfiles(repo, striprev):
     """find out the filelogs affected by the strip"""
@@ -153,7 +157,7 @@
     if curtr is not None:
         del curtr  # avoid carrying reference to transaction for nothing
         msg = _('programming error: cannot strip from inside a transaction')
-        raise util.Abort(msg, hint=_('contact your extension maintainer'))
+        raise error.Abort(msg, hint=_('contact your extension maintainer'))
 
     tr = repo.transaction("strip")
     offset = len(tr.entries)
@@ -188,14 +192,13 @@
                 tr.hookargs = {'source': 'strip',
                                'url': 'bundle:' + vfs.join(chgrpfile)}
                 try:
-                    bundle2.processbundle(repo, gen, lambda: tr)
+                    bundle2.applybundle(repo, gen, tr, source='strip',
+                                        url='bundle:' + vfs.join(chgrpfile))
                     tr.close()
                 finally:
                     tr.release()
             else:
-                changegroup.addchangegroup(repo, gen, 'strip',
-                                           'bundle:' + vfs.join(chgrpfile),
-                                           True)
+                gen.apply(repo, 'strip', 'bundle:' + vfs.join(chgrpfile), True)
             if not repo.ui.verbose:
                 repo.ui.popbuffer()
             f.close()
@@ -295,3 +298,14 @@
     finally:
         lock.release()
 
+def stripbmrevset(repo, mark):
+    """
+    The revset to strip when strip is called with -B mark
+
+    Needs to live here so extensions can use it and wrap it even when strip is
+    not enabled or not present on a box.
+    """
+    return repo.revs("ancestors(bookmark(%s)) - "
+                     "ancestors(head() and not bookmark(%s)) - "
+                     "ancestors(bookmark() and not bookmark(%s))",
+                     mark, mark, mark)
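
The new stripbmrevset() helper computes the revisions that `hg strip -B <mark>` would remove: everything reachable from the bookmark but not from any other head or bookmark. A hedged usage sketch (the repository object and bookmark name are hypothetical):

    from mercurial import repair

    revs = repair.stripbmrevset(repo, 'feature-x')
    nodes = [repo[r].hex() for r in revs]   # changesets only 'feature-x' keeps alive
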
--- a/mercurial/repoview.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/repoview.py	Tue Oct 20 15:59:10 2015 -0500
@@ -6,15 +6,20 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import heapq
+from __future__ import absolute_import
+
 import copy
-import error
-import phases
-import util
-import obsolete
+import heapq
 import struct
-import tags as tagsmod
-from node import nullrev
+
+from .node import nullrev
+from . import (
+    error,
+    obsolete,
+    phases,
+    tags as tagsmod,
+    util,
+)
 
 def hideablerevs(repo):
     """Revisions candidates to be hidden
--- a/mercurial/revlog.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/revlog.py	Tue Oct 20 15:59:10 2015 -0500
@@ -100,11 +100,10 @@
 #  4 bytes: compressed length
 #  4 bytes: base rev
 #  4 bytes: link rev
-# 32 bytes: parent 1 nodeid
-# 32 bytes: parent 2 nodeid
-# 32 bytes: nodeid
+# 20 bytes: parent 1 nodeid
+# 20 bytes: parent 2 nodeid
+# 20 bytes: nodeid
 indexformatv0 = ">4l20s20s20s"
-v0shaoffset = 56
 
 class revlogoldio(object):
     def __init__(self):
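
The corrected comment can be checked against the struct formats themselves: a version-0 entry stores three full 20-byte SHA-1 nodeids, while a version-1 ("ng") entry stores a single 20-byte nodeid padded to a 32-byte slot. A small verification sketch:

    import struct

    # v0 entry: four 32-bit ints + parent1 + parent2 + nodeid (20 bytes each)
    assert struct.calcsize(">4l20s20s20s") == 4 * 4 + 3 * 20        # 76 bytes

    # "ng" entry: one 64-bit field, six 32-bit fields, then the 20-byte
    # nodeid padded with 12 zero bytes to a 32-byte slot
    assert struct.calcsize(">Qiiiiii20s12x") == 8 + 6 * 4 + 20 + 12  # 64 bytes
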
@@ -150,7 +149,6 @@
 #  4 bytes: parent 2 rev
 # 32 bytes: nodeid
 indexformatng = ">Qiiiiii20s12x"
-ngshaoffset = 32
 versionformat = ">I"
 
 # corresponds to uncompressed length of indexformatng (2 gigs, 4-byte
@@ -212,6 +210,7 @@
         self._chunkcache = (0, '')
         self._chunkcachesize = 65536
         self._maxchainlen = None
+        self._aggressivemergedeltas = False
         self.index = []
         self._pcache = {}
         self._nodecache = {nullid: nullrev}
@@ -229,6 +228,8 @@
                 self._chunkcachesize = opts['chunkcachesize']
             if 'maxchainlen' in opts:
                 self._maxchainlen = opts['maxchainlen']
+            if 'aggressivemergedeltas' in opts:
+                self._aggressivemergedeltas = opts['aggressivemergedeltas']
 
         if self._chunkcachesize <= 0:
             raise RevlogError(_('revlog chunk cache size %r is not greater '
@@ -237,14 +238,14 @@
             raise RevlogError(_('revlog chunk cache size %r is not a power '
                                 'of 2') % self._chunkcachesize)
 
-        i = ''
+        indexdata = ''
         self._initempty = True
         try:
             f = self.opener(self.indexfile)
-            i = f.read()
+            indexdata = f.read()
             f.close()
-            if len(i) > 0:
-                v = struct.unpack(versionformat, i[:4])[0]
+            if len(indexdata) > 0:
+                v = struct.unpack(versionformat, indexdata[:4])[0]
                 self._initempty = False
         except IOError as inst:
             if inst.errno != errno.ENOENT:
@@ -269,7 +270,7 @@
         if self.version == REVLOGV0:
             self._io = revlogoldio()
         try:
-            d = self._io.parseindex(i, self._inline)
+            d = self._io.parseindex(indexdata, self._inline)
         except (ValueError, IndexError):
             raise RevlogError(_("index %s is corrupted") % (self.indexfile))
         self.index, nodemap, self._chunkcache = d
@@ -931,11 +932,23 @@
         else:
             self._chunkcache = offset, data
 
-    def _loadchunk(self, offset, length):
-        if self._inline:
-            df = self.opener(self.indexfile)
+    def _loadchunk(self, offset, length, df=None):
+        """Load a chunk/segment from the revlog.
+
+        Accepts absolute offset, length to read, and an optional existing
+        file handle to read from.
+
+        If an existing file handle is passed, it will be seeked as needed and
+        its original seek position will NOT be restored.
+        """
+        if df is not None:
+            closehandle = False
         else:
-            df = self.opener(self.datafile)
+            if self._inline:
+                df = self.opener(self.indexfile)
+            else:
+                df = self.opener(self.datafile)
+            closehandle = True
 
         # Cache data both forward and backward around the requested
         # data, in a fixed size window. This helps speed up operations
@@ -946,13 +959,14 @@
                       - realoffset)
         df.seek(realoffset)
         d = df.read(reallength)
-        df.close()
+        if closehandle:
+            df.close()
         self._addchunk(realoffset, d)
         if offset != realoffset or reallength != length:
             return util.buffer(d, offset - realoffset, length)
         return d
 
-    def _getchunk(self, offset, length):
+    def _getchunk(self, offset, length, df=None):
         o, d = self._chunkcache
         l = len(d)
 
@@ -964,21 +978,21 @@
                 return d # avoid a copy
             return util.buffer(d, cachestart, cacheend - cachestart)
 
-        return self._loadchunk(offset, length)
+        return self._loadchunk(offset, length, df=df)
 
-    def _chunkraw(self, startrev, endrev):
+    def _chunkraw(self, startrev, endrev, df=None):
         start = self.start(startrev)
         end = self.end(endrev)
         if self._inline:
             start += (startrev + 1) * self._io.size
             end += (endrev + 1) * self._io.size
         length = end - start
-        return self._getchunk(start, length)
+        return self._getchunk(start, length, df=df)
 
-    def _chunk(self, rev):
-        return decompress(self._chunkraw(rev, rev))
+    def _chunk(self, rev, df=None):
+        return decompress(self._chunkraw(rev, rev, df=df))
 
-    def _chunks(self, revs):
+    def _chunks(self, revs, df=None):
         '''faster version of [self._chunk(rev) for rev in revs]
 
         Assumes that revs is in ascending order.'''
@@ -998,14 +1012,14 @@
             while True:
                 # ensure that the cache doesn't change out from under us
                 _cache = self._chunkcache
-                self._chunkraw(revs[0], revs[-1])
+                self._chunkraw(revs[0], revs[-1], df=df)
                 if _cache == self._chunkcache:
                     break
             offset, data = _cache
         except OverflowError:
             # issue4215 - we can't cache a run of chunks greater than
             # 2G on Windows
-            return [self._chunk(rev) for rev in revs]
+            return [self._chunk(rev, df=df) for rev in revs]
 
         for rev in revs:
             chunkstart = start(rev)
@@ -1037,9 +1051,12 @@
         return mdiff.textdiff(self.revision(rev1),
                               self.revision(rev2))
 
-    def revision(self, nodeorrev):
+    def revision(self, nodeorrev, _df=None):
         """return an uncompressed revision of a given node or revision
         number.
+
+        _df is an existing file handle to read from. It is meant to only be
+        used internally.
         """
         if isinstance(nodeorrev, int):
             rev = nodeorrev
@@ -1048,14 +1065,13 @@
             node = nodeorrev
             rev = None
 
-        _cache = self._cache # grab local copy of cache to avoid thread race
         cachedrev = None
         if node == nullid:
             return ""
-        if _cache:
-            if _cache[0] == node:
-                return _cache[2]
-            cachedrev = _cache[1]
+        if self._cache:
+            if self._cache[0] == node:
+                return self._cache[2]
+            cachedrev = self._cache[1]
 
         # look up what we need to read
         text = None
@@ -1083,7 +1099,7 @@
 
         if iterrev == cachedrev:
             # cache hit
-            text = _cache[2]
+            text = self._cache[2]
         else:
             chain.append(iterrev)
         chain.reverse()
@@ -1091,7 +1107,7 @@
         # drop cache to save memory
         self._cache = None
 
-        bins = self._chunks(chain)
+        bins = self._chunks(chain, df=_df)
         if text is None:
             text = str(bins[0])
             bins = bins[1:]
@@ -1125,6 +1141,12 @@
                 % (self.indexfile, revornode))
 
     def checkinlinesize(self, tr, fp=None):
+        """Check if the revlog is too big for inline and convert if so.
+
+        This should be called after revisions are added to the revlog. If the
+        revlog has grown too large to remain inline, it is converted to use
+        separate index and data files.
+        """
         if not self._inline or (self.start(-2) + self.length(-2)) < _maxinline:
             return
 
@@ -1196,7 +1218,7 @@
 
         dfh = None
         if not self._inline:
-            dfh = self.opener(self.datafile, "a")
+            dfh = self.opener(self.datafile, "a+")
         ifh = self.opener(self.indexfile, "a+")
         try:
             return self._addrevision(node, text, transaction, link, p1, p2,
@@ -1235,8 +1257,27 @@
             return ('u', text)
         return ("", bin)
 
+    def _isgooddelta(self, d, textlen):
+        """Returns True if the given delta is good. Good means that it is within
+        the disk span, disk size, and chain length bounds that we know to be
+        performant."""
+        if d is None:
+            return False
+
+        # - 'dist' is the distance from the base revision -- bounding it limits
+        #   the amount of I/O we need to do.
+        # - 'compresseddeltalen' is the sum of the total size of deltas we need
+        #   to apply -- bounding it limits the amount of CPU we consume.
+        dist, l, data, base, chainbase, chainlen, compresseddeltalen = d
+        if (dist > textlen * 4 or l > textlen or
+            compresseddeltalen > textlen * 2 or
+            (self._maxchainlen and chainlen > self._maxchainlen)):
+            return False
+
+        return True
+
     def _addrevision(self, node, text, transaction, link, p1, p2, flags,
-                     cachedelta, ifh, dfh):
+                     cachedelta, ifh, dfh, alwayscache=False):
         """internal function to add revisions to the log
 
         see addrevision for argument descriptions.
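
To make the _isgooddelta() bounds concrete: for a 1000-byte revision, a candidate delta is rejected once the distance from its chain base exceeds 4000 bytes, once the delta itself is larger than the text, or once the chain's combined delta size exceeds 2000 bytes. A small worked check with invented numbers:

    textlen = 1000
    # (dist, l, data, base, chainbase, chainlen, compresseddeltalen)
    good = (3500, 200, None, 5, 5, 10, 1500)
    bad = (4500, 200, None, 5, 5, 10, 1500)   # dist > 4 * textlen

    def withinbounds(d, textlen, maxchainlen=None):
        dist, l, _, _, _, chainlen, compresseddeltalen = d
        return not (dist > textlen * 4 or l > textlen or
                    compresseddeltalen > textlen * 2 or
                    (maxchainlen and chainlen > maxchainlen))

    assert withinbounds(good, textlen)
    assert not withinbounds(bad, textlen)      # falls back to a full text
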
@@ -1248,10 +1289,6 @@
         def buildtext():
             if btext[0] is not None:
                 return btext[0]
-            # flush any pending writes here so we can read it in revision
-            if dfh:
-                dfh.flush()
-            ifh.flush()
             baserev = cachedelta[0]
             delta = cachedelta[1]
             # special case deltas which replace entire base; no need to decode
@@ -1262,7 +1299,11 @@
                                                        len(delta) - hlen):
                 btext[0] = delta[hlen:]
             else:
-                basetext = self.revision(self.node(baserev))
+                if self._inline:
+                    fh = ifh
+                else:
+                    fh = dfh
+                basetext = self.revision(self.node(baserev), _df=fh)
                 btext[0] = mdiff.patch(basetext, delta)
             try:
                 self.checkhash(btext[0], p1, p2, node)
@@ -1286,7 +1327,11 @@
                     header = mdiff.replacediffheader(self.rawsize(rev), len(t))
                     delta = header + t
                 else:
-                    ptext = self.revision(self.node(rev))
+                    if self._inline:
+                        fh = ifh
+                    else:
+                        fh = dfh
+                    ptext = self.revision(self.node(rev), _df=fh)
                     delta = mdiff.textdiff(ptext, t)
             data = self.compress(delta)
             l = len(data[1]) + len(data[0])
@@ -1315,19 +1360,6 @@
         basecache = self._basecache
         p1r, p2r = self.rev(p1), self.rev(p2)
 
-        # should we try to build a delta?
-        if prev != nullrev:
-            if self._generaldelta:
-                if p1r >= basecache[1]:
-                    d = builddelta(p1r)
-                elif p2r >= basecache[1]:
-                    d = builddelta(p2r)
-                else:
-                    d = builddelta(prev)
-            else:
-                d = builddelta(prev)
-            dist, l, data, base, chainbase, chainlen, compresseddeltalen = d
-
         # full versions are inserted when the needed deltas
         # become comparable to the uncompressed text
         if text is None:
@@ -1336,13 +1368,42 @@
         else:
             textlen = len(text)
 
-        # - 'dist' is the distance from the base revision -- bounding it limits
-        #   the amount of I/O we need to do.
-        # - 'compresseddeltalen' is the sum of the total size of deltas we need
-        #   to apply -- bounding it limits the amount of CPU we consume.
-        if (d is None or dist > textlen * 4 or l > textlen or
-            compresseddeltalen > textlen * 2 or
-            (self._maxchainlen and chainlen > self._maxchainlen)):
+        # should we try to build a delta?
+        if prev != nullrev:
+            if self._generaldelta:
+                if p2r != nullrev and self._aggressivemergedeltas:
+                    d = builddelta(p1r)
+                    d2 = builddelta(p2r)
+                    p1good = self._isgooddelta(d, textlen)
+                    p2good = self._isgooddelta(d2, textlen)
+                    if p1good and p2good:
+                        # If both are good deltas, choose the smallest
+                        if d2[1] < d[1]:
+                            d = d2
+                    elif p2good:
+                        # If only p2 is good, use it
+                        d = d2
+                    elif p1good:
+                        pass
+                    else:
+                        # Neither is good, try against prev to hopefully save us
+                        # a fulltext.
+                        d = builddelta(prev)
+                else:
+                    # Pick whichever parent is closer to us (to minimize the
+                    # chance of having to build a fulltext). Since
+                    # nullrev == -1, any non-merge commit will always pick p1r.
+                    drev = p2r if p2r > p1r else p1r
+                    d = builddelta(drev)
+                    # If the chosen delta will result in us making a full text,
+                    # give it one last try against prev.
+                    if drev != prev and not self._isgooddelta(d, textlen):
+                        d = builddelta(prev)
+            else:
+                d = builddelta(prev)
+            dist, l, data, base, chainbase, chainlen, compresseddeltalen = d
+
+        if not self._isgooddelta(d, textlen):
             text = buildtext()
             data = self.compress(text)
             l = len(data[1]) + len(data[0])
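
The reordered delta selection can be summarized as a small decision helper: without generaldelta, always delta against the previous revision; with generaldelta, prefer the closer parent and retry against prev if that choice is not good; with aggressivemergedeltas, build deltas against both parents of a merge and keep the smaller acceptable one. A condensed sketch (builddelta and isgood stand in for the closures above):

    def choosedelta(builddelta, isgood, p1r, p2r, prev, generaldelta, aggressive):
        if not generaldelta:
            return builddelta(prev)
        if p2r != -1 and aggressive:                # merge: try both parents
            d1, d2 = builddelta(p1r), builddelta(p2r)
            if isgood(d1) and isgood(d2):
                return d2 if d2[1] < d1[1] else d1  # keep the smaller delta
            if isgood(d2):
                return d2
            if isgood(d1):
                return d1
            return builddelta(prev)                 # last try before a fulltext
        drev = p2r if p2r > p1r else p1r            # closer parent; p1 for non-merges
        d = builddelta(drev)
        if drev != prev and not isgood(d):
            d = builddelta(prev)
        return d
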
@@ -1356,6 +1417,9 @@
         entry = self._io.packentry(e, self.node, self.version, curr)
         self._writeentry(transaction, ifh, dfh, entry, data, link, offset)
 
+        if alwayscache and text is None:
+            text = buildtext()
+
         if type(text) == str: # only accept immutable objects
             self._cache = (node, curr, text)
         self._basecache = (curr, chainbase)
@@ -1369,7 +1433,6 @@
             if data[0]:
                 dfh.write(data[0])
             dfh.write(data[1])
-            dfh.flush()
             ifh.write(entry)
         else:
             offset += curr * self._io.size
@@ -1379,7 +1442,7 @@
             ifh.write(data[1])
             self.checkinlinesize(transaction, ifh)
 
-    def addgroup(self, bundle, linkmapper, transaction, addrevisioncb=None):
+    def addgroup(self, cg, linkmapper, transaction, addrevisioncb=None):
         """
         add a delta group
 
@@ -1407,7 +1470,7 @@
         else:
             transaction.add(self.indexfile, isize, r)
             transaction.add(self.datafile, end)
-            dfh = self.opener(self.datafile, "a")
+            dfh = self.opener(self.datafile, "a+")
         def flush():
             if dfh:
                 dfh.flush()
@@ -1416,7 +1479,7 @@
             # loop through our set of deltas
             chain = None
             while True:
-                chunkdata = bundle.deltachunk(chain)
+                chunkdata = cg.deltachunk(chain)
                 if not chunkdata:
                     break
                 node = chunkdata['node']
@@ -1459,23 +1522,24 @@
                 if self._peek_iscensored(baserev, delta, flush):
                     flags |= REVIDX_ISCENSORED
 
+                # We assume consumers of addrevisioncb will want to retrieve
+                # the added revision, which will require a call to
+                # revision(). revision() will fast path if there is a cache
+                # hit. So, we tell _addrevision() to always cache in this case.
                 chain = self._addrevision(node, None, transaction, link,
                                           p1, p2, flags, (baserev, delta),
-                                          ifh, dfh)
+                                          ifh, dfh,
+                                          alwayscache=bool(addrevisioncb))
 
                 if addrevisioncb:
-                    # Data for added revision can't be read unless flushed
-                    # because _loadchunk always opensa new file handle and
-                    # there is no guarantee data was actually written yet.
-                    flush()
                     addrevisioncb(self, chain)
 
                 if not dfh and not self._inline:
                     # addrevision switched from inline to conventional
                     # reopen the index
                     ifh.close()
-                    dfh = self.opener(self.datafile, "a")
-                    ifh = self.opener(self.indexfile, "a")
+                    dfh = self.opener(self.datafile, "a+")
+                    ifh = self.opener(self.indexfile, "a+")
         finally:
             if dfh:
                 dfh.close()
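
Two related changes run through this file: the data file is now opened "a+" and the handle is threaded down through the new _df/fh parameters, so base texts can be read back without flushing and reopening the file for every added revision, and _addrevision(..., alwayscache=True) pre-populates the one-entry cache so an addrevisioncb callback gets a cache hit from revision(). A hedged sketch of the read-through-handle pattern (rlog and revstoread are hypothetical):

    dfh = None
    if not rlog._inline:
        dfh = rlog.opener(rlog.datafile, 'a+')   # writable and readable
    try:
        for rev in revstoread:
            # reuse the open handle instead of opening the data file per call
            text = rlog.revision(rev, _df=dfh)
    finally:
        if dfh:
            dfh.close()
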
--- a/mercurial/revset.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/revset.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,16 +5,26 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import re
-import parser, util, error, hbisect, phases
-import node
+from __future__ import absolute_import
+
 import heapq
-import match as matchmod
-from i18n import _
-import encoding
-import obsolete as obsmod
-import pathutil
-import repoview
+import re
+
+from .i18n import _
+from . import (
+    destutil,
+    encoding,
+    error,
+    hbisect,
+    match as matchmod,
+    node,
+    obsolete as obsmod,
+    parser,
+    pathutil,
+    phases,
+    repoview,
+    util,
+)
 
 def _revancestors(repo, revs, followfirst):
     """Like revlog.ancestors(), but supports followfirst."""
@@ -78,19 +88,17 @@
 
     return generatorset(iterate(), iterasc=True)
 
-def _revsbetween(repo, roots, heads):
-    """Return all paths between roots and heads, inclusive of both endpoint
-    sets."""
+def _reachablerootspure(repo, minroot, roots, heads, includepath):
+    """return (heads(::<roots> and ::<heads>))
+
+    If includepath is True, return (<roots>::<heads>)."""
     if not roots:
-        return baseset()
+        return []
     parentrevs = repo.changelog.parentrevs
+    roots = set(roots)
     visit = list(heads)
     reachable = set()
     seen = {}
-    # XXX this should be 'parentset.min()' assuming 'parentset' is a smartset
-    # (and if it is not, it should.)
-    minroot = min(roots)
-    roots = set(roots)
     # prefetch all the things! (because python is slow)
     reached = reachable.add
     dovisit = visit.append
@@ -101,6 +109,8 @@
         rev = nextvisit()
         if rev in roots:
             reached(rev)
+            if not includepath:
+                continue
         parents = parentrevs(rev)
         seen[rev] = parents
         for parent in parents:
@@ -108,11 +118,30 @@
                 dovisit(parent)
     if not reachable:
         return baseset()
+    if not includepath:
+        return reachable
     for rev in sorted(seen):
         for parent in seen[rev]:
             if parent in reachable:
                 reached(rev)
-    return baseset(sorted(reachable))
+    return reachable
+
+def reachableroots(repo, roots, heads, includepath=False):
+    """return (heads(::<roots> and ::<heads>))
+
+    If includepath is True, return (<roots>::<heads>)."""
+    if not roots:
+        return baseset()
+    minroot = roots.min()
+    roots = list(roots)
+    heads = list(heads)
+    try:
+        revs = repo.changelog.reachableroots(minroot, heads, roots, includepath)
+    except AttributeError:
+        revs = _reachablerootspure(repo, minroot, roots, heads, includepath)
+    revs = baseset(revs)
+    revs.sort()
+    return revs
 
 elements = {
     # token-type: binding-strength, primary, prefix, infix, suffix
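
reachableroots() prefers an index-level implementation and falls back to the pure-Python walk when the changelog index does not provide one (the AttributeError branch). With includepath=True it yields the same set as the '::' dagrange operator, which is how dagrange() uses it below. A small equivalence sketch (rootrev and headrev are hypothetical revision numbers):

    from mercurial import revset

    rr = revset.reachableroots(repo, revset.baseset([rootrev]),
                               revset.baseset([headrev]), includepath=True)
    assert set(rr) == set(repo.revs('%d::%d', rootrev, headrev))
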
@@ -178,6 +207,21 @@
     if symletters is None:
         symletters = _symletters
 
+    if program and lookup:
+        # attempt to parse old-style ranges first to deal with
+        # things like old-tag which contain query metacharacters
+        parts = program.split(':', 1)
+        if all(lookup(sym) for sym in parts if sym):
+            if parts[0]:
+                yield ('symbol', parts[0], 0)
+            if len(parts) > 1:
+                s = len(parts[0])
+                yield (':', None, s)
+                if parts[1]:
+                    yield ('symbol', parts[1], s + 1)
+            yield ('end', None, len(program))
+            return
+
     pos, l = 0, len(program)
     while pos < l:
         c = program[pos]
@@ -201,7 +245,7 @@
                 c = program[pos]
                 decode = lambda x: x
             else:
-                decode = lambda x: x.decode('string-escape')
+                decode = parser.unescapestr
             pos += 1
             s = pos
             while pos < l: # find closing quote
@@ -382,7 +426,8 @@
 
 def dagrange(repo, subset, x, y):
     r = fullreposet(repo)
-    xs = _revsbetween(repo, getset(repo, r, x), getset(repo, r, y))
+    xs = reachableroots(repo, getset(repo, r, x), getset(repo, r, y),
+                         includepath=True)
     # XXX We should combine with subset first: 'subset & baseset(...)'. This is
     # necessary to ensure we preserve the order in subset.
     return xs & subset
@@ -391,8 +436,13 @@
     return getset(repo, getset(repo, subset, x), y)
 
 def orset(repo, subset, *xs):
-    rs = [getset(repo, subset, x) for x in xs]
-    return _combinesets(rs)
+    assert xs
+    if len(xs) == 1:
+        return getset(repo, subset, xs[0])
+    p = len(xs) // 2
+    a = orset(repo, subset, *xs[:p])
+    b = orset(repo, subset, *xs[p:])
+    return a + b
 
 def notset(repo, subset, x):
     return subset - getset(repo, subset, x)
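
orset() now splits its operand list in half and recurses on each side, so a query with many or-ed operands produces a balanced addset tree of depth O(log n) instead of a lopsided chain. A minimal sketch of the same splitting scheme over plain sets:

    def balancedunion(sets):
        # same divide-and-conquer shape as orset()
        assert sets
        if len(sets) == 1:
            return sets[0]
        p = len(sets) // 2
        return balancedunion(sets[:p]) | balancedunion(sets[p:])

    assert balancedunion([{1}, {2}, {3}, {4}]) == {1, 2, 3, 4}
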
@@ -414,6 +464,16 @@
 
 # functions
 
+def _destupdate(repo, subset, x):
+    # experimental revset for update destination
+    args = getargsdict(x, '_destupdate', 'clean check')
+    return subset & baseset([destutil.destupdate(repo, **args)[0]])
+
+def _destmerge(repo, subset, x):
+    # experimental revset for merge destination
+    getargs(x, 0, 0, _("_mergedefaultdest takes no arguments"))
+    return subset & baseset([destutil.destmerge(repo)])
+
 def adds(repo, subset, x):
     """``adds(pattern)``
     Changesets that add a file matching pattern.
@@ -531,13 +591,13 @@
         bm = getstring(args[0],
                        # i18n: "bookmark" is a keyword
                        _('the argument to bookmark must be a string'))
-        kind, pattern, matcher = _stringmatcher(bm)
+        kind, pattern, matcher = util.stringmatcher(bm)
         bms = set()
         if kind == 'literal':
             bmrev = repo._bookmarks.get(pattern, None)
             if not bmrev:
                 raise error.RepoLookupError(_("bookmark '%s' does not exist")
-                                            % bm)
+                                            % pattern)
             bms.add(repo[bmrev].rev())
         else:
             matchrevs = set()
@@ -572,12 +632,15 @@
         # not a string, but another revspec, e.g. tip()
         pass
     else:
-        kind, pattern, matcher = _stringmatcher(b)
+        kind, pattern, matcher = util.stringmatcher(b)
         if kind == 'literal':
             # note: falls through to the revspec case if no branch with
-            # this name exists
+            # this name exists and pattern kind is not specified explicitly
             if pattern in repo.branchmap():
                 return subset.filter(lambda r: matcher(getbi(r)[0]))
+            if b.startswith('literal:'):
+                raise error.RepoLookupError(_("branch '%s' does not exist")
+                                            % pattern)
         else:
             return subset.filter(lambda r: matcher(getbi(r)[0]))
 
@@ -608,7 +671,7 @@
     try:
         bundlerevs = repo.changelog.bundlerevs
     except AttributeError:
-        raise util.Abort(_("no bundle provided - specify with -R"))
+        raise error.Abort(_("no bundle provided - specify with -R"))
     return subset & bundlerevs
 
 def checkstatus(repo, subset, pat, field):
@@ -860,7 +923,7 @@
         # i18n: "extra" is a keyword
         value = getstring(args['value'], _('second argument to extra must be '
                                            'a string'))
-        kind, value, matcher = _stringmatcher(value)
+        kind, value, matcher = util.stringmatcher(value)
 
     def _matchvalue(r):
         extra = repo[r].extra()
@@ -990,34 +1053,37 @@
     return limit(repo, subset, x)
 
 def _follow(repo, subset, x, name, followfirst=False):
-    l = getargs(x, 0, 1, _("%s takes no arguments or a filename") % name)
+    l = getargs(x, 0, 1, _("%s takes no arguments or a pattern") % name)
     c = repo['.']
     if l:
-        x = getstring(l[0], _("%s expected a filename") % name)
-        if x in c:
-            cx = c[x]
-            s = set(ctx.rev() for ctx in cx.ancestors(followfirst=followfirst))
-            # include the revision responsible for the most recent version
-            s.add(cx.introrev())
-        else:
-            return baseset()
+        x = getstring(l[0], _("%s expected a pattern") % name)
+        matcher = matchmod.match(repo.root, repo.getcwd(), [x],
+                                 ctx=repo[None], default='path')
+
+        s = set()
+        for fname in c:
+            if matcher(fname):
+                fctx = c[fname]
+                s = s.union(set(c.rev() for c in fctx.ancestors(followfirst)))
+                # include the revision responsible for the most recent version
+                s.add(fctx.introrev())
     else:
         s = _revancestors(repo, baseset([c.rev()]), followfirst)
 
     return subset & s
 
 def follow(repo, subset, x):
-    """``follow([file])``
+    """``follow([pattern])``
     An alias for ``::.`` (ancestors of the working directory's first parent).
-    If a filename is specified, the history of the given file is followed,
-    including copies.
+    If a pattern is specified, the histories of files matching the given
+    pattern are followed, including copies.
     """
     return _follow(repo, subset, x, 'follow')
 
 def _followfirst(repo, subset, x):
-    # ``followfirst([file])``
-    # Like ``follow([file])`` but follows only the first parent of
-    # every revision or file revision.
+    # ``followfirst([pattern])``
+    # Like ``follow([pattern])`` but follows only the first parent of
+    # every revision or file revision.
     return _follow(repo, subset, x, '_followfirst', followfirst=True)
 
 def getall(repo, subset, x):
@@ -1168,28 +1234,38 @@
     return subset.filter(matches)
 
 def limit(repo, subset, x):
-    """``limit(set, [n])``
-    First n members of set, defaulting to 1.
+    """``limit(set[, n[, offset]])``
+    First n members of set, defaulting to 1, starting from offset.
     """
-    # i18n: "limit" is a keyword
-    l = getargs(x, 1, 2, _("limit requires one or two arguments"))
+    args = getargsdict(x, 'limit', 'set n offset')
+    if 'set' not in args:
+        # i18n: "limit" is a keyword
+        raise error.ParseError(_("limit requires one to three arguments"))
     try:
-        lim = 1
-        if len(l) == 2:
+        lim, ofs = 1, 0
+        if 'n' in args:
             # i18n: "limit" is a keyword
-            lim = int(getstring(l[1], _("limit requires a number")))
+            lim = int(getstring(args['n'], _("limit requires a number")))
+        if 'offset' in args:
+            # i18n: "limit" is a keyword
+            ofs = int(getstring(args['offset'], _("limit requires a number")))
+        if ofs < 0:
+            raise error.ParseError(_("negative offset"))
     except (TypeError, ValueError):
         # i18n: "limit" is a keyword
         raise error.ParseError(_("limit expects a number"))
-    ss = subset
-    os = getset(repo, fullreposet(repo), l[0])
+    os = getset(repo, fullreposet(repo), args['set'])
     result = []
     it = iter(os)
+    for x in xrange(ofs):
+        y = next(it, None)
+        if y is None:
+            break
     for x in xrange(lim):
         y = next(it, None)
         if y is None:
             break
-        elif y in ss:
+        elif y in subset:
             result.append(y)
     return baseset(result)
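
limit() now takes an optional third argument, so callers can page through a set. A hedged usage example, assuming a repository with at least six visible revisions:

    # first two members of the set, after skipping the first four
    revs = repo.revs('limit(all(), 2, 4)')
    assert list(revs) == [4, 5]
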
 
@@ -1207,7 +1283,6 @@
     except (TypeError, ValueError):
         # i18n: "last" is a keyword
         raise error.ParseError(_("last expects a number"))
-    ss = subset
     os = getset(repo, fullreposet(repo), l[0])
     os.reverse()
     result = []
@@ -1216,7 +1291,7 @@
         y = next(it, None)
         if y is None:
             break
-        elif y in ss:
+        elif y in subset:
             result.append(y)
     return baseset(result)
 
@@ -1225,10 +1300,14 @@
     Changeset with highest revision number in set.
     """
     os = getset(repo, fullreposet(repo), x)
-    if os:
+    try:
         m = os.max()
         if m in subset:
             return baseset([m])
+    except ValueError:
+        # os.max() throws a ValueError when the collection is empty.
+        # Same as python's max().
+        pass
     return baseset()
 
 def merge(repo, subset, x):
@@ -1264,10 +1343,14 @@
     Changeset with lowest revision number in set.
     """
     os = getset(repo, fullreposet(repo), x)
-    if os:
+    try:
         m = os.min()
         if m in subset:
             return baseset([m])
+    except ValueError:
+        # os.min() throws a ValueError when the collection is empty.
+        # Same as python's min().
+        pass
     return baseset()
 
 def modifies(repo, subset, x):
@@ -1296,7 +1379,7 @@
     ns = getstring(args[0],
                    # i18n: "named" is a keyword
                    _('the argument to named must be a string'))
-    kind, pattern, matcher = _stringmatcher(ns)
+    kind, pattern, matcher = util.stringmatcher(ns)
     namespaces = set()
     if kind == 'literal':
         if pattern not in repo.names:
@@ -1415,8 +1498,10 @@
     default push location.
     """
     # Avoid cycles.
-    import discovery
-    import hg
+    from . import (
+        discovery,
+        hg,
+    )
     # i18n: "outgoing" is a keyword
     l = getargs(x, 0, 1, _("outgoing takes one or no arguments"))
     # i18n: "outgoing" is a keyword
@@ -1597,7 +1682,7 @@
     synonym for the current local branch.
     """
 
-    import hg # avoid start-up nasties
+    from . import hg # avoid start-up nasties
     # i18n: "remote" is a keyword
     l = getargs(x, 0, 2, _("remote takes one, two or no arguments"))
 
@@ -1862,7 +1947,7 @@
     m = matchmod.exact(repo.root, repo.root, ['.hgsubstate'])
 
     def submatches(names):
-        k, p, m = _stringmatcher(pat)
+        k, p, m = util.stringmatcher(pat)
         for name in names:
             if m(name):
                 yield name
@@ -1892,47 +1977,8 @@
 
     return subset.filter(matches)
 
-def _stringmatcher(pattern):
-    """
-    accepts a string, possibly starting with 're:' or 'literal:' prefix.
-    returns the matcher name, pattern, and matcher function.
-    missing or unknown prefixes are treated as literal matches.
-
-    helper for tests:
-    >>> def test(pattern, *tests):
-    ...     kind, pattern, matcher = _stringmatcher(pattern)
-    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])
-
-    exact matching (no prefix):
-    >>> test('abcdefg', 'abc', 'def', 'abcdefg')
-    ('literal', 'abcdefg', [False, False, True])
-
-    regex matching ('re:' prefix)
-    >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
-    ('re', 'a.+b', [False, False, True])
-
-    force exact matches ('literal:' prefix)
-    >>> test('literal:re:foobar', 'foobar', 're:foobar')
-    ('literal', 're:foobar', [False, True])
-
-    unknown prefixes are ignored and treated as literals
-    >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
-    ('literal', 'foo:bar', [False, False, True])
-    """
-    if pattern.startswith('re:'):
-        pattern = pattern[3:]
-        try:
-            regex = re.compile(pattern)
-        except re.error as e:
-            raise error.ParseError(_('invalid regular expression: %s')
-                                   % e)
-        return 're', pattern, regex.search
-    elif pattern.startswith('literal:'):
-        pattern = pattern[8:]
-    return 'literal', pattern, pattern.__eq__
-
 def _substringmatcher(pattern):
-    kind, pattern, matcher = _stringmatcher(pattern)
+    kind, pattern, matcher = util.stringmatcher(pattern)
     if kind == 'literal':
         matcher = lambda s: pattern in s
     return kind, pattern, matcher
@@ -1952,7 +1998,7 @@
         pattern = getstring(args[0],
                             # i18n: "tag" is a keyword
                             _('the argument to tag must be a string'))
-        kind, pattern, matcher = _stringmatcher(pattern)
+        kind, pattern, matcher = util.stringmatcher(pattern)
         if kind == 'literal':
             # avoid resolving all tags
             tn = repo._tagscache.tags.get(pattern, None)
@@ -2013,14 +2059,17 @@
             r = int(t)
             if str(r) != t or r not in cl:
                 raise ValueError
+            revs = [r]
         except ValueError:
-            r = repo[t].rev()
-        if r in seen:
-            continue
-        if (r in subset
-            or r == node.nullrev and isinstance(subset, fullreposet)):
-            ls.append(r)
-        seen.add(r)
+            revs = stringset(repo, subset, t)
+
+        for r in revs:
+            if r in seen:
+                continue
+            if (r in subset
+                or r == node.nullrev and isinstance(subset, fullreposet)):
+                ls.append(r)
+            seen.add(r)
     return baseset(ls)
 
 # for internal use
@@ -2043,6 +2092,8 @@
     return baseset([r for r in ls if r in s])
 
 symbols = {
+    "_destupdate": _destupdate,
+    "_destmerge": _destmerge,
     "adds": adds,
     "all": getall,
     "ancestor": ancestor,
@@ -2578,7 +2629,7 @@
     alias = _getalias(aliases, tree)
     if alias is not None:
         if alias.error:
-            raise util.Abort(alias.error)
+            raise error.Abort(alias.error)
         if alias in expanding:
             raise error.ParseError(_('infinite expansion of revset alias "%s" '
                                      'detected') % alias.name)
@@ -2592,7 +2643,7 @@
             l = getlist(tree[2])
             if len(l) != len(alias.args):
                 raise error.ParseError(
-                    _('invalid number of arguments: %s') % len(l))
+                    _('invalid number of arguments: %d') % len(l))
             l = [_expandaliases(aliases, a, [], cache) for a in l]
             result = _expandargs(result, dict(zip(alias.args, l)))
     else:
@@ -2654,6 +2705,27 @@
     if repo:
         lookup = repo.__contains__
     tree = parse(spec, lookup)
+    return _makematcher(ui, tree, repo)
+
+def matchany(ui, specs, repo=None):
+    """Create a matcher that will include any revisions matching one of the
+    given specs"""
+    if not specs:
+        def mfunc(repo, subset=None):
+            return baseset()
+        return mfunc
+    if not all(specs):
+        raise error.ParseError(_("empty query"))
+    lookup = None
+    if repo:
+        lookup = repo.__contains__
+    if len(specs) == 1:
+        tree = parse(specs[0], lookup)
+    else:
+        tree = ('or',) + tuple(parse(s, lookup) for s in specs)
+    return _makematcher(ui, tree, repo)
+
+def _makematcher(ui, tree, repo):
     if ui:
         tree = findaliases(ui, tree, showwarning=ui.warn)
     tree = foldconcat(tree)
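
matchany() gives callers one matcher for several independent specs: each spec is parsed separately and the trees are joined under a single 'or' node before the shared alias-expansion and optimization steps in _makematcher(). A hedged sketch:

    from mercurial import revset

    m = revset.matchany(repo.ui, ['draft()', 'bookmark()'], repo)
    revs = m(repo)    # union of both queries, as one smartset
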
@@ -2756,7 +2828,7 @@
                 ret += listexp(list(args[arg]), d)
                 arg += 1
             else:
-                raise util.Abort('unexpected revspec format character %s' % d)
+                raise error.Abort('unexpected revspec format character %s' % d)
         else:
             ret += c
         pos += 1
@@ -2813,6 +2885,7 @@
         """True if the set will iterate in descending order"""
         raise NotImplementedError()
 
+    @util.cachefunc
     def min(self):
         """return the minimum element in the set"""
         if self.fastasc is not None:
@@ -2821,6 +2894,7 @@
             raise ValueError('arg is an empty sequence')
         return min(self)
 
+    @util.cachefunc
     def max(self):
         """return the maximum element in the set"""
         if self.fastdesc is not None:
@@ -2896,6 +2970,8 @@
     """
     def __init__(self, data=()):
         if not isinstance(data, list):
+            if isinstance(data, set):
+                self._set = data
             data = list(data)
         self._list = data
         self._ascending = None
@@ -2995,14 +3071,9 @@
         """
         self._subset = subset
         self._condition = condition
-        self._cache = {}
 
     def __contains__(self, x):
-        c = self._cache
-        if x not in c:
-            v = c[x] = x in self._subset and self._condition(x)
-            return v
-        return c[x]
+        return x in self._subset and self._condition(x)
 
     def __iter__(self):
         return self._iterfilter(self._subset)
@@ -3028,7 +3099,15 @@
         return lambda: self._iterfilter(it())
 
     def __nonzero__(self):
-        for r in self:
+        fast = self.fastasc
+        if fast is None:
+            fast = self.fastdesc
+        if fast is not None:
+            it = fast()
+        else:
+            it = self
+
+        for r in it:
             return True
         return False
 
@@ -3073,20 +3152,6 @@
     def __repr__(self):
         return '<%s %r>' % (type(self).__name__, self._subset)
 
-# this function will be removed, or merged to addset or orset, when
-# - scmutil.revrange() can be rewritten to not combine calculated smartsets
-# - or addset can handle more than two sets without balanced tree
-def _combinesets(subsets):
-    """Create balanced tree of addsets representing union of given sets"""
-    if not subsets:
-        return baseset()
-    if len(subsets) == 1:
-        return subsets[0]
-    p = len(subsets) // 2
-    xs = _combinesets(subsets[:p])
-    ys = _combinesets(subsets[p:])
-    return addset(xs, ys)
-
 def _iterordered(ascending, iter1, iter2):
     """produce an ordered iteration from two iterators with the same order
 
--- a/mercurial/scmutil.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/scmutil.py	Tue Oct 20 15:59:10 2015 -0500
@@ -6,7 +6,7 @@
 # GNU General Public License version 2 or any later version.
 
 from i18n import _
-from mercurial.node import nullrev, wdirrev
+from mercurial.node import wdirrev
 import util, error, osutil, revset, similar, encoding, phases
 import pathutil
 import match as matchmod
@@ -123,20 +123,20 @@
     # Do not use the "kind" parameter in ui output.
     # It makes strings difficult to translate.
     if lbl in ['tip', '.', 'null']:
-        raise util.Abort(_("the name '%s' is reserved") % lbl)
+        raise error.Abort(_("the name '%s' is reserved") % lbl)
     for c in (':', '\0', '\n', '\r'):
         if c in lbl:
-            raise util.Abort(_("%r cannot be used in a name") % c)
+            raise error.Abort(_("%r cannot be used in a name") % c)
     try:
         int(lbl)
-        raise util.Abort(_("cannot use an integer as a name"))
+        raise error.Abort(_("cannot use an integer as a name"))
     except ValueError:
         pass
 
 def checkfilename(f):
     '''Check that the filename f is an acceptable filename for a tracked file'''
     if '\r' in f or '\n' in f:
-        raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
+        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
 
 def checkportable(ui, f):
     '''Check if filename f is portable and warn or abort depending on config'''
@@ -147,7 +147,7 @@
         if msg:
             msg = "%s: %r" % (msg, f)
             if abort:
-                raise util.Abort(msg)
+                raise error.Abort(msg)
             ui.warn(_("warning: %s\n") % msg)
 
 def checkportabilityalert(ui):
@@ -182,7 +182,7 @@
         if fl in self._loweredfiles and f not in self._dirstate:
             msg = _('possible case-folding collision for %s') % f
             if self._abort:
-                raise util.Abort(msg)
+                raise error.Abort(msg)
             self._ui.warn(_("warning: %s\n") % msg)
         self._loweredfiles.add(fl)
         self._newfiles.add(f)
@@ -475,7 +475,7 @@
         if self._audit:
             r = util.checkosfilename(path)
             if r:
-                raise util.Abort("%s: %r" % (r, path))
+                raise error.Abort("%s: %r" % (r, path))
         self.audit(path)
         f = self.join(path)
 
@@ -583,9 +583,11 @@
 
     def __call__(self, path, mode='r', *args, **kw):
         if mode not in ('r', 'rb'):
-            raise util.Abort('this vfs is read only')
+            raise error.Abort('this vfs is read only')
         return self.vfs(path, mode, *args, **kw)
 
+    def join(self, path, *insidef):
+        return self.vfs.join(path, *insidef)
 
 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
     '''yield every hg repository under path, always recursively.
@@ -687,9 +689,14 @@
 
     l = revrange(repo, [revspec])
     if not l:
-        raise util.Abort(_('empty revision set'))
+        raise error.Abort(_('empty revision set'))
     return repo[l.last()]
 
+def _pairspec(revspec):
+    tree = revset.parse(revspec)
+    tree = revset.optimize(tree, True)[1]  # fix up "x^:y" -> "(x^):y"
+    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
+
 def revpair(repo, revs):
     if not revs:
         return repo.dirstate.p1(), None
@@ -709,69 +716,39 @@
         second = l.last()
 
     if first is None:
-        raise util.Abort(_('empty revision range'))
+        raise error.Abort(_('empty revision range'))
 
-    if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
+    # if top-level is range expression, the result must always be a pair
+    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
         return repo.lookup(first), None
 
     return repo.lookup(first), repo.lookup(second)
 
-_revrangesep = ':'
-
 def revrange(repo, revs):
     """Yield revision as strings from a list of revision specifications."""
-
-    def revfix(repo, val, defval):
-        if not val and val != 0 and defval is not None:
-            return defval
-        return repo[val].rev()
-
-    subsets = []
-
-    revsetaliases = [alias for (alias, _) in
-                     repo.ui.configitems("revsetalias")]
-
+    allspecs = []
     for spec in revs:
-        # attempt to parse old-style ranges first to deal with
-        # things like old-tag which contain query metacharacters
-        try:
-            # ... except for revset aliases without arguments. These
-            # should be parsed as soon as possible, because they might
-            # clash with a hash prefix.
-            if spec in revsetaliases:
-                raise error.RepoLookupError
+        if isinstance(spec, int):
+            spec = revset.formatspec('rev(%d)', spec)
+        allspecs.append(spec)
+    m = revset.matchany(repo.ui, allspecs, repo)
+    return m(repo)
 
-            if isinstance(spec, int):
-                subsets.append(revset.baseset([spec]))
-                continue
+def meaningfulparents(repo, ctx):
+    """Return list of meaningful (or all if debug) parentrevs for rev.
 
-            if _revrangesep in spec:
-                start, end = spec.split(_revrangesep, 1)
-                if start in revsetaliases or end in revsetaliases:
-                    raise error.RepoLookupError
-
-                start = revfix(repo, start, 0)
-                end = revfix(repo, end, len(repo) - 1)
-                if end == nullrev and start < 0:
-                    start = nullrev
-                if start < end:
-                    l = revset.spanset(repo, start, end + 1)
-                else:
-                    l = revset.spanset(repo, start, end - 1)
-                subsets.append(l)
-                continue
-            elif spec and spec in repo: # single unquoted rev
-                rev = revfix(repo, spec, None)
-                subsets.append(revset.baseset([rev]))
-                continue
-        except error.RepoLookupError:
-            pass
-
-        # fall through to new-style queries if old-style fails
-        m = revset.match(repo.ui, spec, repo)
-        subsets.append(m(repo))
-
-    return revset._combinesets(subsets)
+    For merges (two non-nullrev revisions) both parents are meaningful.
+    Otherwise the first parent revision is considered meaningful if it
+    is not the preceding revision.
+    """
+    parents = ctx.parents()
+    if len(parents) > 1:
+        return parents
+    if repo.ui.debugflag:
+        return [parents[0], repo['null']]
+    if parents[0].rev() >= intrev(ctx.rev()) - 1:
+        return []
+    return parents
 
 def expandpats(pats):
     '''Expand bare globs when running on windows.
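
revrange() no longer hand-parses old-style 'a:b' ranges itself; integer entries are rewritten as 'rev(N)' specs and the whole list is handed to revset.matchany(), with old-style range handling now living in the revset tokenizer (see the revset.py hunks above). A condensed sketch of the conversion step (the spec list is hypothetical):

    from mercurial import revset

    specs = []
    for spec in [5, '2:7', 'draft()']:
        if isinstance(spec, int):
            spec = revset.formatspec('rev(%d)', spec)   # 5 -> 'rev(5)'
        specs.append(spec)
    m = revset.matchany(repo.ui, specs, repo)
    revs = m(repo)    # union of rev 5, the range 2:7, and all draft changesets
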
@@ -792,13 +769,15 @@
         ret.append(kindpat)
     return ret
 
-def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath',
+def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                  badfn=None):
     '''Return a matcher and the patterns that were used.
     The matcher will warn about bad matches, unless an alternate badfn callback
     is provided.'''
     if pats == ("",):
         pats = []
+    if opts is None:
+        opts = {}
     if not globbed and default == 'relpath':
         pats = expandpats(pats or [])
 
@@ -815,7 +794,8 @@
         pats = []
     return m, pats
 
-def match(ctx, pats=[], opts={}, globbed=False, default='relpath', badfn=None):
+def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
+          badfn=None):
     '''Return a matcher that will warn about bad matches.'''
     return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
 
@@ -827,7 +807,9 @@
     '''Return a matcher that will efficiently match exactly these files.'''
     return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
 
-def addremove(repo, matcher, prefix, opts={}, dry_run=None, similarity=None):
+def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
+    if opts is None:
+        opts = {}
     m = matcher
     if dry_run is None:
         dry_run = opts.get('dry_run')
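
The signature changes from pats=[], opts={} to pats=(), opts=None guard against Python's shared mutable default arguments: the default object is created once at function definition time and would otherwise carry state from one call to the next. A self-contained illustration of the pitfall:

    def bad(value, opts={}):            # one dict shared by every call
        opts.setdefault('seen', []).append(value)
        return opts['seen']

    assert bad(1) == [1]
    assert bad(2) == [1, 2]             # state from the first call persists

    def good(value, opts=None):         # fresh dict per call, as above
        if opts is None:
            opts = {}
        opts.setdefault('seen', []).append(value)
        return opts['seen']

    assert good(1) == [1]
    assert good(2) == [2]
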
@@ -1009,7 +991,7 @@
         raise error.RequirementError(
             _("repository requires features unknown to this Mercurial: %s")
             % " ".join(missings),
-            hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
+            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                    " for more information"))
     return requirements
 
@@ -1103,7 +1085,7 @@
     Mercurial either atomic renames or appends for files under .hg,
     so to ensure the cache is reliable we need the filesystem to be able
     to tell us if a file has been replaced. If it can't, we fallback to
-    recreating the object on every call (essentially the same behaviour as
+    recreating the object on every call (essentially the same behavior as
     propertycache).
 
     '''
@@ -1166,3 +1148,22 @@
             del obj.__dict__[self.name]
         except KeyError:
             raise AttributeError(self.name)
+
+def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
+    if lock is None:
+        raise error.LockInheritanceContractViolation(
+            'lock can only be inherited while held')
+    if environ is None:
+        environ = {}
+    with lock.inherit() as locker:
+        environ[envvar] = locker
+        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
+
+def wlocksub(repo, cmd, *args, **kwargs):
+    """run cmd as a subprocess that allows inheriting repo's wlock
+
+    This can only be called while the wlock is held. This takes all the
+    arguments that ui.system does, and returns the exit code of the
+    subprocess."""
+    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
+                    **kwargs)
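
wlocksub() ties the new lock-inheritance machinery to ui.system(): while the caller holds the working-directory lock, the subprocess is started with HG_WLOCK_LOCKER in its environment so that a child hg invocation can inherit the lock instead of blocking on it. A hedged usage sketch (the command is arbitrary):

    from mercurial import scmutil

    wlock = repo.wlock()
    try:
        # the child sees HG_WLOCK_LOCKER and may take over the wlock
        rc = scmutil.wlocksub(repo, 'hg update --clean tip')
    finally:
        wlock.release()
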
--- a/mercurial/scmwindows.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/scmwindows.py	Tue Oct 20 15:59:10 2015 -0500
@@ -9,16 +9,13 @@
     filename = util.executablepath()
     # Use mercurial.ini found in directory with hg.exe
     progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
-    if os.path.isfile(progrc):
-        rcpath.append(progrc)
-        return rcpath
+    rcpath.append(progrc)
     # Use hgrc.d found in directory with hg.exe
     progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
     if os.path.isdir(progrcd):
         for f, kind in osutil.listdir(progrcd):
             if f.endswith('.rc'):
                 rcpath.append(os.path.join(progrcd, f))
-        return rcpath
     # else look for a system rcpath in the registry
     value = util.lookupreg('SOFTWARE\\Mercurial', None,
                            _winreg.HKEY_LOCAL_MACHINE)
--- a/mercurial/setdiscovery.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/setdiscovery.py	Tue Oct 20 15:59:10 2015 -0500
@@ -40,11 +40,20 @@
 classified with it (since all ancestors or descendants will be marked as well).
 """
 
+from __future__ import absolute_import
+
 import collections
-from node import nullid, nullrev
-from i18n import _
 import random
-import util, dagutil
+
+from .i18n import _
+from .node import (
+    nullid,
+    nullrev,
+)
+from . import (
+    dagutil,
+    error,
+)
 
 def _updatesample(dag, nodes, sample, quicksamplesize=0):
     """update an existing sample to match the expected size
@@ -138,22 +147,12 @@
     sample = _limitsample(ownheads, initialsamplesize)
     # indices between sample and externalized version must match
     sample = list(sample)
-    if remote.local():
-        # stopgap until we have a proper localpeer that supports batch()
-        srvheadhashes = remote.heads()
-        yesno = remote.known(dag.externalizeall(sample))
-    elif remote.capable('batch'):
-        batch = remote.batch()
-        srvheadhashesref = batch.heads()
-        yesnoref = batch.known(dag.externalizeall(sample))
-        batch.submit()
-        srvheadhashes = srvheadhashesref.value
-        yesno = yesnoref.value
-    else:
-        # compatibility with pre-batch, but post-known remotes during 1.9
-        # development
-        srvheadhashes = remote.heads()
-        sample = []
+    batch = remote.batch()
+    srvheadhashesref = batch.heads()
+    yesnoref = batch.known(dag.externalizeall(sample))
+    batch.submit()
+    srvheadhashes = srvheadhashesref.value
+    yesno = yesnoref.value
 
     if cl.tip() == nullid:
         if srvheadhashes != [nullid]:
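
With the local-peer and pre-batch compatibility branches removed, the initial discovery round is always a single batched wire call. A sketch of the same pattern (nodes is a hypothetical list of binary node ids):

    batch = remote.batch()
    headsref = batch.heads()            # queued, not sent yet
    knownref = batch.known(nodes)       # queued, not sent yet
    batch.submit()                      # one round-trip for both commands
    srvheadhashes = headsref.value
    yesno = knownref.value              # one boolean per queried node
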
@@ -242,7 +241,7 @@
 
     if not result and srvheadhashes != [nullid]:
         if abortwhenunrelated:
-            raise util.Abort(_("repository is unrelated"))
+            raise error.Abort(_("repository is unrelated"))
         else:
             ui.warn(_("warning: repository is unrelated\n"))
         return (set([nullid]), True, srvheadhashes,)
--- a/mercurial/simplemerge.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/simplemerge.py	Tue Oct 20 15:59:10 2015 -0500
@@ -16,9 +16,18 @@
 # mbp: "you know that thing where cvs gives you conflict markers?"
 # s: "i hate that."
 
-from i18n import _
-import scmutil, util, mdiff
-import sys, os
+from __future__ import absolute_import
+
+import os
+import sys
+
+from .i18n import _
+from . import (
+    error,
+    mdiff,
+    scmutil,
+    util,
+)
 
 class CantReprocessAndShowBase(Exception):
     pass
@@ -82,7 +91,8 @@
                     start_marker='<<<<<<<',
                     mid_marker='=======',
                     end_marker='>>>>>>>',
-                    base_marker=None):
+                    base_marker=None,
+                    localorother=None):
         """Return merge in cvs-like form.
         """
         self.conflicts = False
@@ -92,9 +102,9 @@
                 newline = '\r\n'
             elif self.a[0].endswith('\r'):
                 newline = '\r'
-        if name_a:
+        if name_a and start_marker:
             start_marker = start_marker + ' ' + name_a
-        if name_b:
+        if name_b and end_marker:
             end_marker = end_marker + ' ' + name_b
         if name_base and base_marker:
             base_marker = base_marker + ' ' + name_base
@@ -111,18 +121,28 @@
                 for i in range(t[1], t[2]):
                     yield self.b[i]
             elif what == 'conflict':
-                self.conflicts = True
-                yield start_marker + newline
-                for i in range(t[3], t[4]):
-                    yield self.a[i]
-                if base_marker is not None:
-                    yield base_marker + newline
-                    for i in range(t[1], t[2]):
-                        yield self.base[i]
-                yield mid_marker + newline
-                for i in range(t[5], t[6]):
-                    yield self.b[i]
-                yield end_marker + newline
+                if localorother == 'local':
+                    for i in range(t[3], t[4]):
+                        yield self.a[i]
+                elif localorother == 'other':
+                    for i in range(t[5], t[6]):
+                        yield self.b[i]
+                else:
+                    self.conflicts = True
+                    if start_marker is not None:
+                        yield start_marker + newline
+                    for i in range(t[3], t[4]):
+                        yield self.a[i]
+                    if base_marker is not None:
+                        yield base_marker + newline
+                        for i in range(t[1], t[2]):
+                            yield self.base[i]
+                    if mid_marker is not None:
+                        yield mid_marker + newline
+                    for i in range(t[5], t[6]):
+                        yield self.b[i]
+                    if end_marker is not None:
+                        yield end_marker + newline
             else:
                 raise ValueError(what)
 
@@ -342,27 +362,33 @@
             if not opts.get('quiet'):
                 ui.warn(_('warning: %s\n') % msg)
             if not opts.get('text'):
-                raise util.Abort(msg)
+                raise error.Abort(msg)
         return text
 
-    name_a = local
-    name_b = other
-    name_base = None
-    labels = opts.get('label', [])
-    if len(labels) > 0:
-        name_a = labels[0]
-    if len(labels) > 1:
-        name_b = labels[1]
-    if len(labels) > 2:
-        name_base = labels[2]
-    if len(labels) > 3:
-        raise util.Abort(_("can only specify three labels."))
+    mode = opts.get('mode', 'merge')
+    if mode == 'union':
+        name_a = None
+        name_b = None
+        name_base = None
+    else:
+        name_a = local
+        name_b = other
+        name_base = None
+        labels = opts.get('label', [])
+        if len(labels) > 0:
+            name_a = labels[0]
+        if len(labels) > 1:
+            name_b = labels[1]
+        if len(labels) > 2:
+            name_base = labels[2]
+        if len(labels) > 3:
+            raise error.Abort(_("can only specify three labels."))
 
     try:
         localtext = readfile(local)
         basetext = readfile(base)
         othertext = readfile(other)
-    except util.Abort:
+    except error.Abort:
         return 1
 
     local = os.path.realpath(local)
@@ -373,8 +399,12 @@
         out = sys.stdout
 
     m3 = Merge3Text(basetext, localtext, othertext)
-    extrakwargs = {}
-    if name_base is not None:
+    extrakwargs = {"localorother": opts.get("localorother", None)}
+    if mode == 'union':
+        extrakwargs['start_marker'] = None
+        extrakwargs['mid_marker'] = None
+        extrakwargs['end_marker'] = None
+    elif name_base is not None:
         extrakwargs['base_marker'] = '|||||||'
         extrakwargs['name_base'] = name_base
     for line in m3.merge_lines(name_a=name_a, name_b=name_b, **extrakwargs):
@@ -383,7 +413,5 @@
     if not opts.get('print'):
         out.close()
 
-    if m3.conflicts:
-        if not opts.get('quiet'):
-            ui.warn(_("warning: conflicts during merge.\n"))
+    if m3.conflicts and mode != 'union':
         return 1
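
The new 'union' mode above keeps both sides of every conflicting region and suppresses all markers. A minimal sketch of the underlying Merge3Text behaviour (the three input strings are made up for illustration; only the marker arguments come from this patch):

    from mercurial import simplemerge

    base = 'a\nb\nc\n'
    local = 'a\nB\nc\n'
    other = 'a\nbb\nc\n'

    m3 = simplemerge.Merge3Text(base, local, other)
    # Passing None for every marker mirrors mode='union': the conflicting
    # region yields the local lines followed by the other lines, with no
    # markers in between. m3.conflicts is still set, which is why
    # simplemerge() now skips the conflict exit code when mode == 'union'.
    merged = ''.join(m3.merge_lines(start_marker=None, mid_marker=None,
                                    end_marker=None))
    # merged == 'a\nB\nbb\nc\n'
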
--- a/mercurial/sshpeer.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/sshpeer.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,9 +5,16 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+from __future__ import absolute_import
+
 import re
-from i18n import _
-import util, error, wireproto
+
+from .i18n import _
+from . import (
+    error,
+    util,
+    wireproto,
+)
 
 class remotelock(object):
     def __init__(self, repo):
@@ -51,7 +58,7 @@
 
     The main pipe is expected to be a 'bufferedinputpipe' from the util module
     that handles all the os specific bits. This class lives in this module
-    because it focus on behavior specifig to the ssh protocol."""
+    because it focuses on behavior specific to the ssh protocol."""
 
     def __init__(self, ui, main, side):
         self._ui = ui
@@ -270,7 +277,7 @@
         r = self._call(cmd, **args)
         if r:
             # XXX needs to be made better
-            raise util.Abort('unexpected remote reply: %s' % r)
+            raise error.Abort('unexpected remote reply: %s' % r)
         while True:
             d = fp.read(4096)
             if not d:
--- a/mercurial/sshserver.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/sshserver.py	Tue Oct 20 15:59:10 2015 -0500
@@ -6,8 +6,17 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import util, hook, wireproto
-import os, sys
+from __future__ import absolute_import
+
+import os
+import sys
+
+from . import (
+    error,
+    hook,
+    util,
+    wireproto,
+)
 
 class sshserver(wireproto.abstractserverproto):
     def __init__(self, ui, repo):
@@ -31,7 +40,7 @@
             argline = self.fin.readline()[:-1]
             arg, l = argline.split()
             if arg not in keys:
-                raise util.Abort("unexpected parameter %r" % arg)
+                raise error.Abort("unexpected parameter %r" % arg)
             if arg == '*':
                 star = {}
                 for k in xrange(int(l)):
--- a/mercurial/sslutil.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/sslutil.py	Tue Oct 20 15:59:10 2015 -0500
@@ -6,10 +6,17 @@
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
-import os, sys, ssl
+
+from __future__ import absolute_import
 
-from mercurial import util
-from mercurial.i18n import _
+import os
+import ssl
+import sys
+
+from .i18n import _
+from . import error, util
+
+hassni = getattr(ssl, 'HAS_SNI', False)
 
 _canloaddefaultcerts = False
 try:
@@ -45,7 +52,7 @@
         # closed
         # - see http://bugs.python.org/issue13721
         if not sslsocket.cipher():
-            raise util.Abort(_('ssl connection failed'))
+            raise error.Abort(_('ssl connection failed'))
         return sslsocket
 except AttributeError:
     def wrapsocket(sock, keyfile, certfile, ui, cert_reqs=ssl.CERT_NONE,
@@ -57,7 +64,7 @@
         # closed
         # - see http://bugs.python.org/issue13721
         if not sslsocket.cipher():
-            raise util.Abort(_('ssl connection failed'))
+            raise error.Abort(_('ssl connection failed'))
         return sslsocket
 
 def _verifycert(cert, hostname):
@@ -135,7 +142,7 @@
     elif cacerts:
         cacerts = util.expandpath(cacerts)
         if not os.path.exists(cacerts):
-            raise util.Abort(_('could not find web.cacerts: %s') % cacerts)
+            raise error.Abort(_('could not find web.cacerts: %s') % cacerts)
     else:
         cacerts = _defaultcacerts()
         if cacerts and cacerts != '!':
@@ -158,15 +165,15 @@
         hostfingerprint = self.ui.config('hostfingerprints', host)
 
         if not sock.cipher(): # work around http://bugs.python.org/issue13721
-            raise util.Abort(_('%s ssl connection error') % host)
+            raise error.Abort(_('%s ssl connection error') % host)
         try:
             peercert = sock.getpeercert(True)
             peercert2 = sock.getpeercert()
         except AttributeError:
-            raise util.Abort(_('%s ssl connection error') % host)
+            raise error.Abort(_('%s ssl connection error') % host)
 
         if not peercert:
-            raise util.Abort(_('%s certificate error: '
+            raise error.Abort(_('%s certificate error: '
                                'no certificate received') % host)
         peerfingerprint = util.sha1(peercert).hexdigest()
         nicefingerprint = ":".join([peerfingerprint[x:x + 2]
@@ -174,7 +181,7 @@
         if hostfingerprint:
             if peerfingerprint.lower() != \
                     hostfingerprint.replace(':', '').lower():
-                raise util.Abort(_('certificate for %s has unexpected '
+                raise error.Abort(_('certificate for %s has unexpected '
                                    'fingerprint %s') % (host, nicefingerprint),
                                  hint=_('check hostfingerprint configuration'))
             self.ui.debug('%s certificate matched fingerprint %s\n' %
@@ -182,13 +189,13 @@
         elif cacerts != '!':
             msg = _verifycert(peercert2, host)
             if msg:
-                raise util.Abort(_('%s certificate error: %s') % (host, msg),
+                raise error.Abort(_('%s certificate error: %s') % (host, msg),
                                  hint=_('configure hostfingerprint %s or use '
                                         '--insecure to connect insecurely') %
                                       nicefingerprint)
             self.ui.debug('%s certificate successfully verified\n' % host)
         elif strict:
-            raise util.Abort(_('%s certificate with fingerprint %s not '
+            raise error.Abort(_('%s certificate with fingerprint %s not '
                                'verified') % (host, nicefingerprint),
                              hint=_('check hostfingerprints or web.cacerts '
                                      'config setting'))
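
For reference, the fingerprint compared against hostfingerprints above is the SHA-1 of the peer's DER-encoded certificate, rendered as colon-separated hex pairs. A minimal standalone sketch using only the standard library (the hostfingerprint helper and the host/port arguments are illustrative, not part of this patch):

    import hashlib
    import socket
    import ssl

    def hostfingerprint(host, port=443):
        # No verification here; we only want the DER bytes, the same value
        # sock.getpeercert(True) returns in the code above.
        sock = ssl.wrap_socket(socket.create_connection((host, port)))
        try:
            der = sock.getpeercert(True)
        finally:
            sock.close()
        digest = hashlib.sha1(der).hexdigest()
        return ':'.join(digest[i:i + 2] for i in range(0, len(digest), 2))
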
--- a/mercurial/statichttprepo.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/statichttprepo.py	Tue Oct 20 15:59:10 2015 -0500
@@ -7,10 +7,26 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from i18n import _
-import changelog, byterange, url, error, namespaces
-import localrepo, manifest, util, scmutil, store
-import urllib, urllib2, errno, os
+from __future__ import absolute_import
+
+import errno
+import os
+import urllib
+import urllib2
+
+from .i18n import _
+from . import (
+    byterange,
+    changelog,
+    error,
+    localrepo,
+    manifest,
+    namespaces,
+    scmutil,
+    store,
+    url,
+    util,
+)
 
 class httprangereader(object):
     def __init__(self, url, opener):
@@ -155,9 +171,9 @@
         return statichttppeer(self)
 
     def lock(self, wait=True):
-        raise util.Abort(_('cannot lock static-http repository'))
+        raise error.Abort(_('cannot lock static-http repository'))
 
 def instance(ui, path, create):
     if create:
-        raise util.Abort(_('cannot create new static-http repository'))
+        raise error.Abort(_('cannot create new static-http repository'))
     return statichttprepository(ui, path[7:])
--- a/mercurial/store.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/store.py	Tue Oct 20 15:59:10 2015 -0500
@@ -6,7 +6,7 @@
 # GNU General Public License version 2 or any later version.
 
 from i18n import _
-import scmutil, util, parsers
+import scmutil, util, parsers, error
 import os, stat, errno
 
 _sha = util.sha1
@@ -404,8 +404,8 @@
             fp.seek(0)
             for n, line in enumerate(fp):
                 if not line.rstrip('\n'):
-                    t = _('invalid entry in fncache, line %s') % (n + 1)
-                    raise util.Abort(t)
+                    t = _('invalid entry in fncache, line %d') % (n + 1)
+                    raise error.Abort(t)
         fp.close()
 
     def write(self, tr):
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/streamclone.py	Tue Oct 20 15:59:10 2015 -0500
@@ -0,0 +1,392 @@
+# streamclone.py - producing and consuming streaming repository data
+#
+# Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+import struct
+import time
+
+from .i18n import _
+from . import (
+    branchmap,
+    error,
+    store,
+    util,
+)
+
+def canperformstreamclone(pullop, bailifbundle2supported=False):
+    """Whether it is possible to perform a streaming clone as part of pull.
+
+    ``bailifbundle2supported`` will cause the function to return False if
+    bundle2 stream clones are supported. It should only be called by the
+    legacy stream clone code path.
+
+    Returns a tuple of (supported, requirements). ``supported`` is True if
+    streaming clone is supported and False otherwise. ``requirements`` is
+    a set of repo requirements from the remote, or ``None`` if stream clone
+    isn't supported.
+    """
+    repo = pullop.repo
+    remote = pullop.remote
+
+    bundle2supported = False
+    if pullop.canusebundle2:
+        if 'v1' in pullop.remotebundle2caps.get('stream', []):
+            bundle2supported = True
+        # else
+            # Server doesn't support bundle2 stream clone or doesn't support
+            # the versions we support. Fall back and possibly allow legacy.
+
+    # Ensures legacy code path uses available bundle2.
+    if bailifbundle2supported and bundle2supported:
+        return False, None
+    # Ensures bundle2 doesn't try to do a stream clone if it isn't supported.
+    #elif not bailifbundle2supported and not bundle2supported:
+    #    return False, None
+
+    # Streaming clone only works on empty repositories.
+    if len(repo):
+        return False, None
+
+    # Streaming clone only works if all data is being requested.
+    if pullop.heads:
+        return False, None
+
+    streamrequested = pullop.streamclonerequested
+
+    # If we don't have a preference, let the server decide for us. This
+    # likely only comes into play in LANs.
+    if streamrequested is None:
+        # The server can advertise whether to prefer streaming clone.
+        streamrequested = remote.capable('stream-preferred')
+
+    if not streamrequested:
+        return False, None
+
+    # In order for stream clone to work, the client has to support all the
+    # requirements advertised by the server.
+    #
+    # The server advertises its requirements via the "stream" and "streamreqs"
+    # capability. "stream" (a value-less capability) is advertised if and only
+    # if the only requirement is "revlogv1." Else, the "streamreqs" capability
+    # is advertised and contains a comma-delimited list of requirements.
+    requirements = set()
+    if remote.capable('stream'):
+        requirements.add('revlogv1')
+    else:
+        streamreqs = remote.capable('streamreqs')
+        # This is weird and shouldn't happen with modern servers.
+        if not streamreqs:
+            return False, None
+
+        streamreqs = set(streamreqs.split(','))
+        # Server requires something we don't support. Bail.
+        if streamreqs - repo.supportedformats:
+            return False, None
+        requirements = streamreqs
+
+    return True, requirements
+
+def maybeperformlegacystreamclone(pullop):
+    """Possibly perform a legacy stream clone operation.
+
+    Legacy stream clones are performed as part of pull but before all other
+    operations.
+
+    A legacy stream clone will not be performed if a bundle2 stream clone is
+    supported.
+    """
+    supported, requirements = canperformstreamclone(pullop)
+
+    if not supported:
+        return
+
+    repo = pullop.repo
+    remote = pullop.remote
+
+    # Save remote branchmap. We will use it later to speed up branchcache
+    # creation.
+    rbranchmap = None
+    if remote.capable('branchmap'):
+        rbranchmap = remote.branchmap()
+
+    repo.ui.status(_('streaming all changes\n'))
+
+    fp = remote.stream_out()
+    l = fp.readline()
+    try:
+        resp = int(l)
+    except ValueError:
+        raise error.ResponseError(
+            _('unexpected response from remote server:'), l)
+    if resp == 1:
+        raise error.Abort(_('operation forbidden by server'))
+    elif resp == 2:
+        raise error.Abort(_('locking the remote repository failed'))
+    elif resp != 0:
+        raise error.Abort(_('the server sent an unknown error code'))
+
+    l = fp.readline()
+    try:
+        filecount, bytecount = map(int, l.split(' ', 1))
+    except (ValueError, TypeError):
+        raise error.ResponseError(
+            _('unexpected response from remote server:'), l)
+
+    lock = repo.lock()
+    try:
+        consumev1(repo, fp, filecount, bytecount)
+
+        # new requirements = old non-format requirements +
+        #                    new format-related remote requirements
+        # requirements from the streamed-in repository
+        repo.requirements = requirements | (
+                repo.requirements - repo.supportedformats)
+        repo._applyopenerreqs()
+        repo._writerequirements()
+
+        if rbranchmap:
+            branchmap.replacecache(repo, rbranchmap)
+
+        repo.invalidate()
+    finally:
+        lock.release()
+
+def allowservergeneration(ui):
+    """Whether streaming clones are allowed from the server."""
+    return ui.configbool('server', 'uncompressed', True, untrusted=True)
+
+# This is its own function so extensions can override it.
+def _walkstreamfiles(repo):
+    return repo.store.walk()
+
+def generatev1(repo):
+    """Emit content for version 1 of a streaming clone.
+
+    This returns a 3-tuple of (file count, byte size, data iterator).
+
+    The data iterator consists of N entries for each file being transferred.
+    Each file entry starts as a line with the file name and integer size
+    delimited by a null byte.
+
+    The raw file data follows. Following the raw file data is the next file
+    entry, or EOF.
+
+    When used on the wire protocol, an additional line indicating protocol
+    success will be prepended to the stream. This function is not responsible
+    for adding it.
+
+    This function will obtain a repository lock to ensure a consistent view of
+    the store is captured. It therefore may raise LockError.
+    """
+    entries = []
+    total_bytes = 0
+    # Get consistent snapshot of repo, lock during scan.
+    lock = repo.lock()
+    try:
+        repo.ui.debug('scanning\n')
+        for name, ename, size in _walkstreamfiles(repo):
+            if size:
+                entries.append((name, size))
+                total_bytes += size
+    finally:
+        lock.release()
+
+    repo.ui.debug('%d files, %d bytes to transfer\n' %
+                  (len(entries), total_bytes))
+
+    svfs = repo.svfs
+    oldaudit = svfs.mustaudit
+    debugflag = repo.ui.debugflag
+    svfs.mustaudit = False
+
+    def emitrevlogdata():
+        try:
+            for name, size in entries:
+                if debugflag:
+                    repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
+                # partially encode name over the wire for backwards compat
+                yield '%s\0%d\n' % (store.encodedir(name), size)
+                if size <= 65536:
+                    fp = svfs(name)
+                    try:
+                        data = fp.read(size)
+                    finally:
+                        fp.close()
+                    yield data
+                else:
+                    for chunk in util.filechunkiter(svfs(name), limit=size):
+                        yield chunk
+        finally:
+            svfs.mustaudit = oldaudit
+
+    return len(entries), total_bytes, emitrevlogdata()
+
+def generatev1wireproto(repo):
+    """Emit content for version 1 of streaming clone suitable for the wire.
+
+    This is the data output from ``generatev1()`` with a header line
+    indicating file count and byte size.
+    """
+    filecount, bytecount, it = generatev1(repo)
+    yield '%d %d\n' % (filecount, bytecount)
+    for chunk in it:
+        yield chunk
+
+def generatebundlev1(repo, compression='UN'):
+    """Emit content for version 1 of a stream clone bundle.
+
+    The first 4 bytes of the output ("HGS1") denote this as stream clone
+    bundle version 1.
+
+    The next 2 bytes indicate the compression type. Only "UN" is currently
+    supported.
+
+    The next 16 bytes are two 64-bit big endian unsigned integers indicating
+    file count and byte count, respectively.
+
+    The next 2 bytes is a 16-bit big endian unsigned short declaring the length
+    of the requirements string, including a trailing \0. The following N bytes
+    are the requirements string, which is ASCII containing a comma-delimited
+    list of repo requirements that are needed to support the data.
+
+    The remaining content is the output of ``generatev1()`` (which may be
+    compressed in the future).
+
+    Returns a tuple of (requirements, data generator).
+    """
+    if compression != 'UN':
+        raise ValueError('we do not support the compression argument yet')
+
+    requirements = repo.requirements & repo.supportedformats
+    requires = ','.join(sorted(requirements))
+
+    def gen():
+        yield 'HGS1'
+        yield compression
+
+        filecount, bytecount, it = generatev1(repo)
+        repo.ui.status(_('writing %d bytes for %d files\n') %
+                         (bytecount, filecount))
+
+        yield struct.pack('>QQ', filecount, bytecount)
+        yield struct.pack('>H', len(requires) + 1)
+        yield requires + '\0'
+
+        # This is where we'll add compression in the future.
+        assert compression == 'UN'
+
+        seen = 0
+        repo.ui.progress(_('bundle'), 0, total=bytecount)
+
+        for chunk in it:
+            seen += len(chunk)
+            repo.ui.progress(_('bundle'), seen, total=bytecount)
+            yield chunk
+
+        repo.ui.progress(_('bundle'), None)
+
+    return requirements, gen()
+
+def consumev1(repo, fp, filecount, bytecount):
+    """Apply the contents from version 1 of a streaming clone file handle.
+
+    This takes the output from "streamout" and applies it to the specified
+    repository.
+
+    Like "streamout," the status line added by the wire protocol is not handled
+    by this function.
+    """
+    lock = repo.lock()
+    try:
+        repo.ui.status(_('%d files to transfer, %s of data\n') %
+                       (filecount, util.bytecount(bytecount)))
+        handled_bytes = 0
+        repo.ui.progress(_('clone'), 0, total=bytecount)
+        start = time.time()
+
+        tr = repo.transaction(_('clone'))
+        try:
+            for i in xrange(filecount):
+                # XXX doesn't support '\n' or '\r' in filenames
+                l = fp.readline()
+                try:
+                    name, size = l.split('\0', 1)
+                    size = int(size)
+                except (ValueError, TypeError):
+                    raise error.ResponseError(
+                        _('unexpected response from remote server:'), l)
+                if repo.ui.debugflag:
+                    repo.ui.debug('adding %s (%s)\n' %
+                                  (name, util.bytecount(size)))
+                # for backwards compat, name was partially encoded
+                ofp = repo.svfs(store.decodedir(name), 'w')
+                for chunk in util.filechunkiter(fp, limit=size):
+                    handled_bytes += len(chunk)
+                    repo.ui.progress(_('clone'), handled_bytes, total=bytecount)
+                    ofp.write(chunk)
+                ofp.close()
+            tr.close()
+        finally:
+            tr.release()
+
+        # Writing straight to files circumvented the inmemory caches
+        repo.invalidate()
+
+        elapsed = time.time() - start
+        if elapsed <= 0:
+            elapsed = 0.001
+        repo.ui.progress(_('clone'), None)
+        repo.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
+                       (util.bytecount(bytecount), elapsed,
+                        util.bytecount(bytecount / elapsed)))
+    finally:
+        lock.release()
+
+def applybundlev1(repo, fp):
+    """Apply the content from a stream clone bundle version 1.
+
+    We assume the 4 byte header has been read and validated and the file handle
+    is at the 2 byte compression identifier.
+    """
+    if len(repo):
+        raise error.Abort(_('cannot apply stream clone bundle on non-empty '
+                            'repo'))
+
+    compression = fp.read(2)
+    if compression != 'UN':
+        raise error.Abort(_('only uncompressed stream clone bundles are '
+            'supported; got %s') % compression)
+
+    filecount, bytecount = struct.unpack('>QQ', fp.read(16))
+    requireslen = struct.unpack('>H', fp.read(2))[0]
+    requires = fp.read(requireslen)
+
+    if not requires.endswith('\0'):
+        raise error.Abort(_('malformed stream clone bundle: '
+                            'requirements not properly encoded'))
+
+    requirements = set(requires.rstrip('\0').split(','))
+    missingreqs = requirements - repo.supportedformats
+    if missingreqs:
+        raise error.Abort(_('unable to apply stream clone: '
+                            'unsupported format: %s') %
+                            ', '.join(sorted(missingreqs)))
+
+    consumev1(repo, fp, filecount, bytecount)
+
+class streamcloneapplier(object):
+    """Class to manage applying streaming clone bundles.
+
+    We need to wrap ``applybundlev1()`` in a dedicated type to enable bundle
+    readers to perform bundle type-specific functionality.
+    """
+    def __init__(self, fh):
+        self._fh = fh
+
+    def apply(self, repo):
+        return applybundlev1(repo, self._fh)
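
The bundle layout documented in generatebundlev1() and consumed by applybundlev1() can be exercised end to end with the helpers added here. A minimal sketch, assuming a Mercurial checkout on the Python path; the repository paths and the bundle file name are placeholders, and hg.repository()/ui.ui() are the ordinary constructors rather than anything introduced by this patch:

    from mercurial import hg, streamclone, ui as uimod

    u = uimod.ui()
    src = hg.repository(u, '/path/to/source')       # placeholder path
    dst = hg.repository(u, '/path/to/empty-clone')  # must be an empty repo

    # Write a v1 bundle: 'HGS1', 'UN', '>QQ' file/byte counts, '>H'
    # requirements length, the requirements string, then the generatev1()
    # payload.
    requirements, gen = streamclone.generatebundlev1(src)
    fh = open('/tmp/source.hgs1', 'wb')
    for chunk in gen:
        fh.write(chunk)
    fh.close()

    # Apply it; streamcloneapplier/applybundlev1 expect the 4-byte magic to
    # have been read and validated already.
    fh = open('/tmp/source.hgs1', 'rb')
    assert fh.read(4) == 'HGS1'
    streamclone.streamcloneapplier(fh).apply(dst)
    fh.close()
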
--- a/mercurial/strutil.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/strutil.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,6 +5,8 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+from __future__ import absolute_import
+
 def findall(haystack, needle, start=0, end=None):
     if end is None:
         end = len(haystack)
--- a/mercurial/subrepo.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/subrepo.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,15 +5,34 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+from __future__ import absolute_import
+
 import copy
-import errno, os, re, posixpath, sys
+import errno
+import os
+import posixpath
+import re
+import stat
+import subprocess
+import sys
+import tarfile
 import xml.dom.minidom
-import stat, subprocess, tarfile
-from i18n import _
-import config, util, node, error, cmdutil, scmutil, match as matchmod
-import phases
-import pathutil
-import exchange
+
+
+from .i18n import _
+from . import (
+    cmdutil,
+    config,
+    error,
+    exchange,
+    match as matchmod,
+    node,
+    pathutil,
+    phases,
+    scmutil,
+    util,
+)
+
 hg = None
 propertycache = util.propertycache
 
@@ -76,7 +95,7 @@
                 return
             p.parse(f, data, sections, remap, read)
         else:
-            raise util.Abort(_("subrepo spec file \'%s\' not found") %
+            raise error.Abort(_("subrepo spec file \'%s\' not found") %
                              repo.pathto(f))
     if '.hgsub' in ctx:
         read('.hgsub')
@@ -94,7 +113,7 @@
                 try:
                     revision, path = l.split(" ", 1)
                 except ValueError:
-                    raise util.Abort(_("invalid subrepository revision "
+                    raise error.Abort(_("invalid subrepository revision "
                                        "specifier in \'%s\' line %d")
                                      % (repo.pathto('.hgsubstate'), (i + 1)))
                 rev[path] = revision
@@ -114,7 +133,7 @@
             try:
                 src = re.sub(pattern, repl, src, 1)
             except re.error as e:
-                raise util.Abort(_("bad subrepository pattern in %s: %s")
+                raise error.Abort(_("bad subrepository pattern in %s: %s")
                                  % (p.source('subpaths', pattern), e))
         return src
 
@@ -123,7 +142,7 @@
         kind = 'hg'
         if src.startswith('['):
             if ']' not in src:
-                raise util.Abort(_('missing ] in subrepo source'))
+                raise error.Abort(_('missing ] in subrepo source'))
             kind, src = src.split(']', 1)
             kind = kind[1:]
             src = src.lstrip() # strip any extra whitespace after ']'
@@ -305,7 +324,7 @@
             # chop off the .hg component to get the default path form
             return os.path.dirname(repo.sharedpath)
     if abort:
-        raise util.Abort(_("default path for subrepository not found"))
+        raise error.Abort(_("default path for subrepository not found"))
 
 def _sanitize(ui, vfs, ignore):
     for dirname, dirs, names in vfs.walk():
@@ -328,13 +347,13 @@
     # so we manually delay the circular imports to not break
     # scripts that don't use our demand-loading
     global hg
-    import hg as h
+    from . import hg as h
     hg = h
 
     pathutil.pathauditor(ctx.repo().root)(path)
     state = ctx.substate[path]
     if state[2] not in types:
-        raise util.Abort(_('unknown subrepo type %s') % state[2])
+        raise error.Abort(_('unknown subrepo type %s') % state[2])
     if allowwdir:
         state = (state[0], ctx.subrev(path), state[2])
     return types[state[2]](ctx, path, state[:2])
@@ -346,13 +365,13 @@
     # so we manually delay the circular imports to not break
     # scripts that don't use our demand-loading
     global hg
-    import hg as h
+    from . import hg as h
     hg = h
 
     pathutil.pathauditor(ctx.repo().root)(path)
     state = ctx.substate[path]
     if state[2] not in types:
-        raise util.Abort(_('unknown subrepo type %s') % state[2])
+        raise error.Abort(_('unknown subrepo type %s') % state[2])
     subrev = ''
     if state[2] == 'hg':
         subrev = "0" * 40
@@ -365,7 +384,7 @@
         return commitphase
     check = ui.config('phases', 'checksubrepos', 'follow')
     if check not in ('ignore', 'follow', 'abort'):
-        raise util.Abort(_('invalid phases.checksubrepos configuration: %s')
+        raise error.Abort(_('invalid phases.checksubrepos configuration: %s')
                          % (check))
     if check == 'ignore':
         return commitphase
@@ -379,7 +398,7 @@
             maxsub = s
     if commitphase < maxphase:
         if check == 'abort':
-            raise util.Abort(_("can't commit in %s phase"
+            raise error.Abort(_("can't commit in %s phase"
                                " conflicting %s from subrepository %s") %
                              (phases.phasenames[commitphase],
                               phases.phasenames[maxphase], maxsub))
@@ -399,7 +418,7 @@
         ``ctx`` is the context referring this subrepository in the
         parent repository.
 
-        ``path`` is the path to this subrepositiry as seen from
+        ``path`` is the path to this subrepository as seen from
         innermost repository.
         """
         self.ui = ctx.repo().ui
@@ -437,7 +456,7 @@
         """
         dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate)
         if dirtyreason:
-            raise util.Abort(dirtyreason)
+            raise error.Abort(dirtyreason)
 
     def basestate(self):
         """current working directory base state, disregarding .hgsubstate
@@ -1037,7 +1056,7 @@
 
     @propertycache
     def wvfs(self):
-        """return own wvfs for efficiency and consitency
+        """return own wvfs for efficiency and consistency
         """
         return self._repo.wvfs
 
@@ -1054,7 +1073,7 @@
         self._state = state
         self._exe = util.findexe('svn')
         if not self._exe:
-            raise util.Abort(_("'svn' executable not found for subrepo '%s'")
+            raise error.Abort(_("'svn' executable not found for subrepo '%s'")
                              % self._path)
 
     def _svncommand(self, commands, filename='', failok=False):
@@ -1089,7 +1108,8 @@
         stderr = stderr.strip()
         if not failok:
             if p.returncode:
-                raise util.Abort(stderr or 'exited with code %d' % p.returncode)
+                raise error.Abort(stderr or 'exited with code %d'
+                                  % p.returncode)
             if stderr:
                 self.ui.warn(stderr + '\n')
         return stdout, stderr
@@ -1099,7 +1119,7 @@
         output, err = self._svncommand(['--version', '--quiet'], filename=None)
         m = re.search(r'^(\d+)\.(\d+)', output)
         if not m:
-            raise util.Abort(_('cannot retrieve svn tool version'))
+            raise error.Abort(_('cannot retrieve svn tool version'))
         return (int(m.group(1)), int(m.group(2)))
 
     def _wcrevs(self):
@@ -1177,11 +1197,11 @@
             return self.basestate()
         if extchanged:
             # Do not try to commit externals
-            raise util.Abort(_('cannot commit svn externals'))
+            raise error.Abort(_('cannot commit svn externals'))
         if missing:
             # svn can commit with missing entries but aborting like hg
             # seems a better approach.
-            raise util.Abort(_('cannot commit missing svn entries'))
+            raise error.Abort(_('cannot commit missing svn entries'))
         commitinfo, err = self._svncommand(['commit', '-m', text])
         self.ui.status(commitinfo)
         newrev = re.search('Committed revision ([0-9]+).', commitinfo)
@@ -1191,8 +1211,8 @@
                 # svn one. For instance, svn ignores missing files
                 # when committing. If there are only missing files, no
                 # commit is made, no output and no error code.
-                raise util.Abort(_('failed to commit svn changes'))
-            raise util.Abort(commitinfo.splitlines()[-1])
+                raise error.Abort(_('failed to commit svn changes'))
+            raise error.Abort(commitinfo.splitlines()[-1])
         newrev = newrev.groups()[0]
         self.ui.status(self._svncommand(['update', '-r', newrev])[0])
         return newrev
@@ -1231,7 +1251,7 @@
                 self.remove()
                 self.get(state, overwrite=False)
                 return
-            raise util.Abort((status or err).splitlines()[-1])
+            raise error.Abort((status or err).splitlines()[-1])
         self.ui.status(status)
 
     @annotatesubrepoerror
@@ -1288,7 +1308,7 @@
         if versionstatus == 'unknown':
             self.ui.warn(_('cannot retrieve git version\n'))
         elif versionstatus == 'abort':
-            raise util.Abort(_('git subrepo requires at least 1.6.0 or later'))
+            raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
         elif versionstatus == 'warning':
             self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
 
@@ -1375,7 +1395,7 @@
             if command in ('cat-file', 'symbolic-ref'):
                 return retdata, p.returncode
             # for all others, abort
-            raise util.Abort('git %s error %d in %s' %
+            raise error.Abort('git %s error %d in %s' %
                              (command, p.returncode, self._relpath))
 
         return retdata, p.returncode
@@ -1472,7 +1492,7 @@
         # try only origin: the originally cloned repo
         self._gitcommand(['fetch'])
         if not self._githavelocally(revision):
-            raise util.Abort(_("revision %s does not exist in subrepo %s\n") %
+            raise error.Abort(_("revision %s does not exist in subrepo %s\n") %
                                (revision, self._relpath))
 
     @annotatesubrepoerror
@@ -1581,7 +1601,7 @@
     @annotatesubrepoerror
     def commit(self, text, user, date):
         if self._gitmissing():
-            raise util.Abort(_("subrepo %s is missing") % self._relpath)
+            raise error.Abort(_("subrepo %s is missing") % self._relpath)
         cmd = ['commit', '-a', '-m', text]
         env = os.environ.copy()
         if user:
@@ -1627,7 +1647,7 @@
         if not self._state[1]:
             return True
         if self._gitmissing():
-            raise util.Abort(_("subrepo %s is missing") % self._relpath)
+            raise error.Abort(_("subrepo %s is missing") % self._relpath)
         # if a branch in origin contains the revision, nothing to do
         branch2rev, rev2branch = self._gitbranchmap()
         if self._state[1] in rev2branch:
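
The phases.checksubrepos handling above accepts exactly three values and falls back to 'follow'. A minimal hgrc sketch of the setting it reads:

    [phases]
    # one of: ignore, follow (the default), abort
    checksubrepos = follow
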
--- a/mercurial/tagmerge.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/tagmerge.py	Tue Oct 20 15:59:10 2015 -0500
@@ -71,11 +71,20 @@
 #         - put blocks whose nodes come all from p2 first
 #     - write the tag blocks in the sorted order
 
-import tags as tagsmod
-import util
-from node import nullid, hex
-from i18n import _
+from __future__ import absolute_import
+
 import operator
+
+from .i18n import _
+from .node import (
+    hex,
+    nullid,
+)
+from . import (
+    tags as tagsmod,
+    util,
+)
+
 hexnullid = hex(nullid)
 
 def readtagsformerge(ui, repo, lines, fn='', keeplinenums=False):
--- a/mercurial/tags.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/tags.py	Tue Oct 20 15:59:10 2015 -0500
@@ -10,15 +10,27 @@
 # Eventually, it could take care of updating (adding/removing/moving)
 # tags too.
 
-from node import nullid, bin, hex, short
-from i18n import _
-import util
-import encoding
-import error
-from array import array
+from __future__ import absolute_import
+
+import array
 import errno
 import time
 
+from .i18n import _
+from .node import (
+    bin,
+    hex,
+    nullid,
+    short,
+)
+from . import (
+    encoding,
+    error,
+    util,
+)
+
+array = array.array
+
 # Tags computation can be expensive and caches exist to make it fast in
 # the common case.
 #
@@ -263,7 +275,7 @@
     If the cache is not up to date, the caller is responsible for reading tag
     info from each returned head. (See findglobaltags().)
     '''
-    import scmutil  # avoid cycle
+    from . import scmutil  # avoid cycle
 
     try:
         cachefile = repo.vfs(_filename(repo), 'r')
--- a/mercurial/templatefilters.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templatefilters.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,10 +5,21 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import cgi, re, os, time, urllib
-import encoding, node, util
-import hbisect
-import templatekw
+from __future__ import absolute_import
+
+import cgi
+import os
+import re
+import time
+import urllib
+
+from . import (
+    encoding,
+    hbisect,
+    node,
+    templatekw,
+    util,
+)
 
 def addbreaks(text):
     """:addbreaks: Any text. Add an XHTML "<br />" tag before the end of
@@ -70,12 +81,6 @@
     """:count: List or text. Returns the length as an integer."""
     return len(i)
 
-def datefilter(text):
-    """:date: Date. Returns a date in a Unix date format, including the
-    timezone: "Mon Sep 04 15:13:13 2006 0700".
-    """
-    return util.datestr(text)
-
 def domain(author):
     """:domain: Any text. Finds the first string that looks like an email
     address, and extracts just the domain component. Example: ``User
@@ -230,10 +235,6 @@
         s = s.replace(k, v)
     return ''.join(_uescape(c) for c in s)
 
-def localdate(text):
-    """:localdate: Date. Converts a date to local date."""
-    return (util.parsedate(text)[0], util.makedate()[1])
-
 def lower(text):
     """:lower: Any text. Converts the text to lowercase."""
     return encoding.lower(text)
@@ -337,10 +338,6 @@
         return ""
     return str(thing)
 
-def strip(text):
-    """:strip: Any text. Strips all leading and trailing whitespace."""
-    return text.strip()
-
 def stripdir(text):
     """:stripdir: Treat the text as path and strip a directory level, if
     possible. For example, "foo" and "foo/bar" becomes "foo".
@@ -390,7 +387,6 @@
     "age": age,
     "basename": basename,
     "count": count,
-    "date": datefilter,
     "domain": domain,
     "email": email,
     "escape": escape,
@@ -403,7 +399,6 @@
     "isodatesec": isodatesec,
     "json": json,
     "jsonescape": jsonescape,
-    "localdate": localdate,
     "lower": lower,
     "nonempty": nonempty,
     "obfuscate": obfuscate,
@@ -418,7 +413,6 @@
     "splitlines": splitlines,
     "stringescape": stringescape,
     "stringify": stringify,
-    "strip": strip,
     "stripdir": stripdir,
     "tabindent": tabindent,
     "upper": upper,
--- a/mercurial/templatekw.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templatekw.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,9 +5,16 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from node import hex
-import patch, scmutil, util, error
-import hbisect
+from __future__ import absolute_import
+
+from .node import hex
+from . import (
+    error,
+    hbisect,
+    patch,
+    scmutil,
+    util,
+)
 
 # This helper class allows us to handle both:
 #  "{files}" (legacy command-line-specific list hack) and
@@ -115,14 +122,21 @@
         revcache['files'] = repo.status(ctx.p1(), ctx)[:3]
     return revcache['files']
 
-def getlatesttags(repo, ctx, cache):
+def getlatesttags(repo, ctx, cache, pattern=None):
     '''return date, distance and name for the latest tag of rev'''
 
-    if 'latesttags' not in cache:
+    cachename = 'latesttags'
+    if pattern is not None:
+        cachename += '-' + pattern
+        match = util.stringmatcher(pattern)[2]
+    else:
+        match = util.always
+
+    if cachename not in cache:
         # Cache mapping from rev to a tuple with tag date, tag
         # distance and tag name
-        cache['latesttags'] = {-1: (0, 0, ['null'])}
-    latesttags = cache['latesttags']
+        cache[cachename] = {-1: (0, 0, ['null'])}
+    latesttags = cache[cachename]
 
     rev = ctx.rev()
     todo = [rev]
@@ -132,7 +146,8 @@
             continue
         ctx = repo[rev]
         tags = [t for t in ctx.tags()
-                if (repo.tagtype(t) and repo.tagtype(t) != 'local')]
+                if (repo.tagtype(t) and repo.tagtype(t) != 'local'
+                    and match(t))]
         if tags:
             latesttags[rev] = ctx.date()[0], 0, [t for t in sorted(tags)]
             continue
@@ -198,7 +213,7 @@
 def showbranches(**args):
     """:branches: List of strings. The name of the branch on which the
     changeset was committed. Will be empty if the branch name was
-    default.
+    default. (DEPRECATED)
     """
     branch = args['ctx'].branch()
     if branch != 'default':
@@ -329,11 +344,27 @@
     """:latesttag: List of strings. The global tags on the most recent globally
     tagged ancestor of this changeset.
     """
+    return showlatesttags(None, **args)
+
+def showlatesttags(pattern, **args):
+    """helper method for the latesttag keyword and function"""
     repo, ctx = args['repo'], args['ctx']
     cache = args['cache']
-    latesttags = getlatesttags(repo, ctx, cache)[2]
+    latesttags = getlatesttags(repo, ctx, cache, pattern)
 
-    return showlist('latesttag', latesttags, separator=':', **args)
+    # latesttag[0] is an implementation detail for sorting csets on different
+    # branches in a stable manner; it is the date the tagged cset was created,
+    # not the date the tag was created.  Therefore it isn't made visible here.
+    makemap = lambda v: {
+        'changes': _showchangessincetag,
+        'distance': latesttags[1],
+        'latesttag': v,   # BC with {latesttag % '{latesttag}'}
+        'tag': v
+    }
+
+    tags = latesttags[2]
+    f = _showlist('latesttag', tags, separator=':', **args)
+    return _hybrid(f, tags, makemap, lambda x: x['latesttag'])
 
 def showlatesttagdistance(repo, ctx, templ, cache, **args):
     """:latesttagdistance: Integer. Longest path to the latest tag."""
@@ -342,15 +373,20 @@
 def showchangessincelatesttag(repo, ctx, templ, cache, **args):
     """:changessincelatesttag: Integer. All ancestors not in the latest tag."""
     latesttag = getlatesttags(repo, ctx, cache)[2][0]
+
+    return _showchangessincetag(repo, ctx, tag=latesttag, **args)
+
+def _showchangessincetag(repo, ctx, **args):
     offset = 0
     revs = [ctx.rev()]
+    tag = args['tag']
 
     # The only() revset doesn't currently support wdir()
     if ctx.rev() is None:
         offset = 1
         revs = [p.rev() for p in ctx.parents()]
 
-    return len(repo.revs('only(%ld, %s)', revs, latesttag)) + offset
+    return len(repo.revs('only(%ld, %s)', revs, tag)) + offset
 
 def showmanifest(**args):
     repo, ctx, templ = args['repo'], args['ctx'], args['templ']
@@ -390,6 +426,18 @@
     parent, all digits are 0."""
     return ctx.p2().hex()
 
+def showparents(**args):
+    """:parents: List of strings. The parents of the changeset in "rev:node"
+    format. If the changeset has only one "natural" parent (the predecessor
+    revision) nothing is shown."""
+    repo = args['repo']
+    ctx = args['ctx']
+    parents = [[('rev', p.rev()),
+                ('node', p.hex()),
+                ('phase', p.phasestr())]
+               for p in scmutil.meaningfulparents(repo, ctx)]
+    return showlist('parent', parents, **args)
+
 def showphase(repo, ctx, templ, **args):
     """:phase: String. The changeset phase name."""
     return ctx.phasestr()
@@ -402,6 +450,14 @@
     """:rev: Integer. The repository-local changeset revision number."""
     return scmutil.intrev(ctx.rev())
 
+def showrevslist(name, revs, **args):
+    """helper to generate a list of revisions in which a mapped template will
+    be evaluated"""
+    repo = args['ctx'].repo()
+    f = _showlist(name, revs, **args)
+    return _hybrid(f, revs,
+                   lambda x: {name: x, 'ctx': repo[x], 'revcache': {}})
+
 def showsubrepos(**args):
     """:subrepos: List of strings. Updated subrepositories in the changeset."""
     ctx = args['ctx']
@@ -470,6 +526,7 @@
     'p1node': showp1node,
     'p2rev': showp2rev,
     'p2node': showp2node,
+    'parents': showparents,
     'phase': showphase,
     'phaseidx': showphaseidx,
     'rev': showrev,
@@ -477,17 +534,5 @@
     'tags': showtags,
 }
 
-def _showparents(**args):
-    """:parents: List of strings. The parents of the changeset in "rev:node"
-    format. If the changeset has only one "natural" parent (the predecessor
-    revision) nothing is shown."""
-    pass
-
-dockeywords = {
-    'parents': _showparents,
-}
-dockeywords.update(keywords)
-del dockeywords['branches']
-
 # tell hggettext to extract docstrings from these functions:
-i18nfunctions = dockeywords.values()
+i18nfunctions = keywords.values()
--- a/mercurial/templater.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templater.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,12 +5,23 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from i18n import _
-import os, re
-import util, config, templatefilters, templatekw, parser, error
-import revset as revsetmod
+from __future__ import absolute_import
+
+import os
+import re
 import types
-import minirst
+
+from .i18n import _
+from . import (
+    config,
+    error,
+    minirst,
+    parser,
+    revset as revsetmod,
+    templatefilters,
+    templatekw,
+    util,
+)
 
 # template parsing
 
@@ -94,11 +105,8 @@
                     pos += 4 # skip over double escaped characters
                     continue
                 if program.startswith(quote, pos, end):
-                    try:
-                        # interpret as if it were a part of an outer string
-                        data = program[s:pos].decode('string-escape')
-                    except ValueError: # unbalanced escapes
-                        raise error.ParseError(_("syntax error"), s)
+                    # interpret as if it were a part of an outer string
+                    data = parser.unescapestr(program[s:pos])
                     if token == 'template':
                         data = _parsetemplate(data, 0, len(data))[0]
                     yield (token, data, s)
@@ -147,19 +155,18 @@
         n = min((tmpl.find(c, pos, stop) for c in sepchars),
                 key=lambda n: (n < 0, n))
         if n < 0:
-            parsed.append(('string', tmpl[pos:stop].decode('string-escape')))
+            parsed.append(('string', parser.unescapestr(tmpl[pos:stop])))
             pos = stop
             break
         c = tmpl[n]
         bs = (n - pos) - len(tmpl[pos:n].rstrip('\\'))
         if bs % 2 == 1:
             # escaped (e.g. '\{', '\\\{', but not '\\{')
-            parsed.append(('string',
-                           tmpl[pos:n - 1].decode('string-escape') + c))
+            parsed.append(('string', parser.unescapestr(tmpl[pos:n - 1]) + c))
             pos = n + 1
             continue
         if n > pos:
-            parsed.append(('string', tmpl[pos:n].decode('string-escape')))
+            parsed.append(('string', parser.unescapestr(tmpl[pos:n])))
         if c == quote:
             return parsed, n + 1
 
@@ -194,12 +201,6 @@
         return getlist(x[1]) + [x[2]]
     return [x]
 
-def getfilter(exp, context):
-    f = getsymbol(exp)
-    if f not in context._filters:
-        raise error.ParseError(_("unknown function '%s'") % f)
-    return context._filters[f]
-
 def gettemplate(exp, context):
     if exp[0] == 'template':
         return [compileexp(e, context, methods) for e in exp[1]]
@@ -210,6 +211,15 @@
         return context._load(exp[1])
     raise error.ParseError(_("expected template specifier"))
 
+def evalfuncarg(context, mapping, arg):
+    func, data = arg
+    # func() may return string, generator of strings or arbitrary object such
+    # as date tuple, but filter does not want generator.
+    thing = func(context, mapping, data)
+    if isinstance(thing, types.GeneratorType):
+        thing = stringify(thing)
+    return thing
+
 def runinteger(context, mapping, data):
     return int(data)
 
@@ -240,25 +250,27 @@
         yield func(context, mapping, data)
 
 def buildfilter(exp, context):
-    func, data = compileexp(exp[1], context, methods)
-    filt = getfilter(exp[2], context)
-    return (runfilter, (func, data, filt))
+    arg = compileexp(exp[1], context, methods)
+    n = getsymbol(exp[2])
+    if n in context._filters:
+        filt = context._filters[n]
+        return (runfilter, (arg, filt))
+    if n in funcs:
+        f = funcs[n]
+        return (f, [arg])
+    raise error.ParseError(_("unknown function '%s'") % n)
 
 def runfilter(context, mapping, data):
-    func, data, filt = data
-    # func() may return string, generator of strings or arbitrary object such
-    # as date tuple, but filter does not want generator.
-    thing = func(context, mapping, data)
-    if isinstance(thing, types.GeneratorType):
-        thing = stringify(thing)
+    arg, filt = data
+    thing = evalfuncarg(context, mapping, arg)
     try:
         return filt(thing)
     except (ValueError, AttributeError, TypeError):
-        if isinstance(data, tuple):
-            dt = data[1]
+        if isinstance(arg[1], tuple):
+            dt = arg[1][1]
         else:
-            dt = data
-        raise util.Abort(_("template filter '%s' is not compatible with "
+            dt = arg[1]
+        raise error.Abort(_("template filter '%s' is not compatible with "
                            "keyword '%s'") % (filt.func_name, dt))
 
 def buildmap(exp, context):
@@ -295,12 +307,13 @@
         if len(args) != 1:
             raise error.ParseError(_("filter %s expects one argument") % n)
         f = context._filters[n]
-        return (runfilter, (args[0][0], args[0][1], f))
+        return (runfilter, (args[0], f))
     raise error.ParseError(_("unknown function '%s'") % n)
 
 def date(context, mapping, args):
     """:date(date[, fmt]): Format a date. See :hg:`help dates` for formatting
-    strings."""
+    strings. The default is a Unix date format, including the timezone:
+    "Mon Sep 04 15:13:13 2006 0700"."""
     if not (1 <= len(args) <= 2):
         # i18n: "date" is a keyword
         raise error.ParseError(_("date expects one or two arguments"))
@@ -408,7 +421,7 @@
 def get(context, mapping, args):
     """:get(dict, key): Get an attribute/key from an object. Some keywords
     are complex types. This function allows you to obtain the value of an
-    attribute on these type."""
+    attribute on these types."""
     if len(args) != 2:
         # i18n: "get" is a keyword
         raise error.ParseError(_("get() expects two arguments"))
@@ -497,6 +510,47 @@
     # ignore args[0] (the label string) since this is supposed to be a no-op
     yield args[1][0](context, mapping, args[1][1])
 
+def latesttag(context, mapping, args):
+    """:latesttag([pattern]): The global tags matching the given pattern on the
+    most recent globally tagged ancestor of this changeset."""
+    if len(args) > 1:
+        # i18n: "latesttag" is a keyword
+        raise error.ParseError(_("latesttag expects at most one argument"))
+
+    pattern = None
+    if len(args) == 1:
+        pattern = stringify(args[0][0](context, mapping, args[0][1]))
+
+    return templatekw.showlatesttags(pattern, **mapping)
+
+def localdate(context, mapping, args):
+    """:localdate(date[, tz]): Converts a date to the specified timezone.
+    The default is local date."""
+    if not (1 <= len(args) <= 2):
+        # i18n: "localdate" is a keyword
+        raise error.ParseError(_("localdate expects one or two arguments"))
+
+    date = evalfuncarg(context, mapping, args[0])
+    try:
+        date = util.parsedate(date)
+    except AttributeError:  # not str nor date tuple
+        # i18n: "localdate" is a keyword
+        raise error.ParseError(_("localdate expects a date information"))
+    if len(args) >= 2:
+        tzoffset = None
+        tz = evalfuncarg(context, mapping, args[1])
+        if isinstance(tz, str):
+            tzoffset = util.parsetimezone(tz)
+        if tzoffset is None:
+            try:
+                tzoffset = int(tz)
+            except (TypeError, ValueError):
+                # i18n: "localdate" is a keyword
+                raise error.ParseError(_("localdate expects a timezone"))
+    else:
+        tzoffset = util.makedate()[1]
+    return (date[0], tzoffset)
+
 def revset(context, mapping, args):
     """:revset(query[, formatargs...]): Execute a revision set query. See
     :hg:`help revset`."""
@@ -525,7 +579,7 @@
             revs = list([str(r) for r in revs])
             revsetcache[raw] = revs
 
-    return templatekw.showlist("revision", revs, **mapping)
+    return templatekw.showrevslist("revision", revs, **mapping)
 
 def rstdoc(context, mapping, args):
     """:rstdoc(text, style): Format ReStructuredText."""
@@ -591,7 +645,8 @@
                 return shortest
 
 def strip(context, mapping, args):
-    """:strip(text[, chars]): Strip characters from a string."""
+    """:strip(text[, chars]): Strip characters from a string. By default,
+    strips all leading and trailing whitespace."""
     if not (1 <= len(args) <= 2):
         # i18n: "strip" is a keyword
         raise error.ParseError(_("strip expects one or two arguments"))
@@ -612,7 +667,16 @@
     pat = stringify(args[0][0](context, mapping, args[0][1]))
     rpl = stringify(args[1][0](context, mapping, args[1][1]))
     src = stringify(args[2][0](context, mapping, args[2][1]))
-    yield re.sub(pat, rpl, src)
+    try:
+        patre = re.compile(pat)
+    except re.error:
+        # i18n: "sub" is a keyword
+        raise error.ParseError(_("sub got an invalid pattern: %s") % pat)
+    try:
+        yield patre.sub(rpl, src)
+    except re.error:
+        # i18n: "sub" is a keyword
+        raise error.ParseError(_("sub got an invalid replacement: %s") % rpl)
 
 def startswith(context, mapping, args):
     """:startswith(pattern, text): Returns the value from the "text" argument
@@ -680,6 +744,8 @@
     "indent": indent,
     "join": join,
     "label": label,
+    "latesttag": latesttag,
+    "localdate": localdate,
     "pad": pad,
     "revset": revset,
     "rstdoc": rstdoc,
@@ -738,9 +804,13 @@
     filter uses function to transform value. syntax is
     {key|filter1|filter2|...}.'''
 
-    def __init__(self, loader, filters={}, defaults={}):
+    def __init__(self, loader, filters=None, defaults=None):
         self._loader = loader
+        if filters is None:
+            filters = {}
         self._filters = filters
+        if defaults is None:
+            defaults = {}
         self._defaults = defaults
         self._cache = {}
 
@@ -770,17 +840,23 @@
             stylelist.append(split[1])
     return ", ".join(sorted(stylelist))
 
-class TemplateNotFound(util.Abort):
+class TemplateNotFound(error.Abort):
     pass
 
 class templater(object):
 
-    def __init__(self, mapfile, filters={}, defaults={}, cache={},
+    def __init__(self, mapfile, filters=None, defaults=None, cache=None,
                  minchunk=1024, maxchunk=65536):
         '''set up template engine.
         mapfile is name of file to read map definitions from.
         filters is dict of functions. each transforms a value into another.
         defaults is dict of default map definitions.'''
+        if filters is None:
+            filters = {}
+        if defaults is None:
+            defaults = {}
+        if cache is None:
+            cache = {}
         self.mapfile = mapfile or 'template'
         self.cache = cache.copy()
         self.map = {}
@@ -797,7 +873,7 @@
         if not mapfile:
             return
         if not os.path.exists(mapfile):
-            raise util.Abort(_("style '%s' not found") % mapfile,
+            raise error.Abort(_("style '%s' not found") % mapfile,
                              hint=_("available styles: %s") % stylelist())
 
         conf = config.config(includepaths=templatepaths())
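
Together with the templatekw changes, the new template functions can be driven through the stock changeset displayer. A minimal sketch (the repository path and the 're:^v' tag pattern are illustrative, and cmdutil.show_changeset is the ordinary displayer entry point, not something added here); note that strip, date and localdate are plain functions now, but one-argument filter-style usage such as desc|strip keeps working through the buildfilter() fallback above:

    from mercurial import cmdutil, hg, ui as uimod

    u = uimod.ui()
    repo = hg.repository(u, '/path/to/repo')    # placeholder path
    spec = ("{rev}:{node|short} "
            "{latesttag('re:^v') % '{tag}+{distance} '}"
            "{localdate(date)|isodate} {desc|strip|firstline}\n")
    displayer = cmdutil.show_changeset(u, repo, {'template': spec})
    displayer.show(repo['tip'])
    displayer.close()
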
--- a/mercurial/templates/atom/error.tmpl	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/atom/error.tmpl	Tue Oct 20 15:59:10 2015 -0500
@@ -7,7 +7,7 @@
  <updated>1970-01-01T00:00:00+00:00</updated>
  <entry>
   <title>Error</title>
-  <id>http://mercurial.selenic.com/#error</id>
+  <id>https://mercurial-scm.org/#error</id>
   <author>
     <name>mercurial</name>
   </author>
--- a/mercurial/templates/coal/header.tmpl	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/coal/header.tmpl	Tue Oct 20 15:59:10 2015 -0500
@@ -3,5 +3,6 @@
 <head>
 <link rel="icon" href="{staticurl|urlescape}hgicon.png" type="image/png" />
 <meta name="robots" content="index, nofollow" />
-<link rel="stylesheet" href="{staticurl|urlescape}style-coal.css" type="text/css" />
+<link rel="stylesheet" href="{staticurl|urlescape}style-paper.css" type="text/css" />
+<link rel="stylesheet" href="{staticurl|urlescape}style-extra-coal.css" type="text/css" />
 <script type="text/javascript" src="{staticurl|urlescape}mercurial.js"></script>
--- a/mercurial/templates/coal/map	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/coal/map	Tue Oct 20 15:59:10 2015 -0500
@@ -1,7 +1,5 @@
-default = 'shortlog'
+%include paper/map
 
-mimetype = 'text/html; charset={encoding}'
-header = header.tmpl
 footer = ../paper/footer.tmpl
 search = ../paper/search.tmpl
 
@@ -13,23 +11,6 @@
 help = ../paper/help.tmpl
 helptopics = ../paper/helptopics.tmpl
 
-helpentry = '
-  <tr><td>
-    <a href="{url|urlescape}help/{topic|escape}{sessionvars%urlparameter}">
-      {topic|escape}
-    </a>
-  </td><td>
-    {summary|escape}
-  </td></tr>'
-
-naventry = '<a href="{url|urlescape}log/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
-navshortentry = '<a href="{url|urlescape}shortlog/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
-navgraphentry = '<a href="{url|urlescape}graph/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
-filenaventry = '<a href="{url|urlescape}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{label|escape}</a> '
-filedifflink = '<a href="{url|urlescape}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a> '
-filenodelink = '<a href="{url|urlescape}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a> '
-filenolink = '{file|escape} '
-fileellipses = '...'
 diffstatlink = ../paper/diffstat.tmpl
 diffstatnolink = ../paper/diffstat.tmpl
 changelogentry = ../paper/shortlogentry.tmpl
@@ -37,212 +18,17 @@
 changeset = ../paper/changeset.tmpl
 manifest = ../paper/manifest.tmpl
 
-nav = '{before%naventry} {after%naventry}'
-navshort = '{before%navshortentry}{after%navshortentry}'
-navgraph = '{before%navgraphentry}{after%navgraphentry}'
-filenav = '{before%filenaventry}{after%filenaventry}'
-
-direntry = '
-  <tr class="fileline">
-    <td class="name">
-      <a href="{url|urlescape}file/{symrev}{path|urlescape}{sessionvars%urlparameter}">
-        <img src="{staticurl|urlescape}coal-folder.png" alt="dir."/> {basename|escape}/
-      </a>
-      <a href="{url|urlescape}file/{symrev}{path|urlescape}/{emptydirs|urlescape}{sessionvars%urlparameter}">
-        {emptydirs|escape}
-      </a>
-    </td>
-    <td class="size"></td>
-    <td class="permissions">drwxr-xr-x</td>
-  </tr>'
-
-fileentry = '
-  <tr class="fileline">
-    <td class="filename">
-      <a href="{url|urlescape}file/{symrev}/{file|urlescape}{sessionvars%urlparameter}">
-        <img src="{staticurl|urlescape}coal-file.png" alt="file"/> {basename|escape}
-      </a>
-    </td>
-    <td class="size">{size}</td>
-    <td class="permissions">{permissions|permissions}</td>
-  </tr>'
-
 filerevision = ../paper/filerevision.tmpl
 fileannotate = ../paper/fileannotate.tmpl
 filediff = ../paper/filediff.tmpl
 filecomparison = ../paper/filecomparison.tmpl
 filelog = ../paper/filelog.tmpl
-fileline = '
-  <div class="source"><a href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</div>'
 filelogentry = ../paper/filelogentry.tmpl
 
-annotateline = '
-  <tr>
-    <td class="annotate">
-      <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#{targetline}"
-         title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a>
-    </td>
-    <td class="source"><a href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</td>
-  </tr>'
-
-diffblock = '<div class="source bottomline"><pre>{lines}</pre></div>'
-difflineplus = '<a href="#{lineid}" id="{lineid}">{linenumber}</a> <span class="plusline">{line|escape}</span>'
-difflineminus = '<a href="#{lineid}" id="{lineid}">{linenumber}</a> <span class="minusline">{line|escape}</span>'
-difflineat = '<a href="#{lineid}" id="{lineid}">{linenumber}</a> <span class="atline">{line|escape}</span>'
-diffline = '<a href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}'
-
-comparisonblock ='
-  <tbody class="block">
-  {lines}
-  </tbody>'
-comparisonline = '
-  <tr>
-    <td class="source {type}"><a href="#{lineid}" id="{lineid}">{leftlinenumber}</a> {leftline|escape}</td>
-    <td class="source {type}"><a href="#{lineid}" id="{lineid}">{rightlinenumber}</a> {rightline|escape}</td>
-  </tr>'
-
-changelogparent = '
-  <tr>
-    <th class="parent">parent {rev}:</th>
-    <td class="parent"><a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td>
-  </tr>'
-
-changesetparent = '<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> '
-
-changesetparentdiff = '
-  {changesetparent}
-  {ifeq(node, basenode, '(current diff)', '({difffrom})')}'
-
-difffrom = '<a href="{url|urlescape}rev/{node|short}:{originalnode|short}{sessionvars%urlparameter}">diff</a>'
-
-filerevparent = '<a href="{url|urlescape}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a> '
-filerevchild = '<a href="{url|urlescape}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a> '
+tags = ../paper/tags.tmpl
+bookmarks = ../paper/bookmarks.tmpl
+branches = ../paper/branches.tmpl
 
-filerename = '{file|escape}@'
-filelogrename = '
-  <span class="base">
-    base
-    <a href="{url|urlescape}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">
-      {file|escape}@{node|short}
-    </a>
-  </span>'
-fileannotateparent = '
-  <tr>
-    <td class="metatag">parent:</td>
-    <td>
-      <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">
-        {rename%filerename}{node|short}
-      </a>
-    </td>
-  </tr>'
-changesetchild = ' <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>'
-changelogchild = '
-  <tr>
-    <th class="child">child</th>
-    <td class="child">
-      <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">
-        {node|short}
-      </a>
-    </td>
-  </tr>'
-fileannotatechild = '
-  <tr>
-    <td class="metatag">child:</td>
-    <td>
-      <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">
-        {node|short}
-      </a>
-    </td>
-  </tr>'
-tags = ../paper/tags.tmpl
-tagentry = '
-  <tr class="tagEntry">
-    <td>
-      <a href="{url|urlescape}rev/{tag|revescape}{sessionvars%urlparameter}">
-        {tag|escape}
-      </a>
-    </td>
-    <td class="node">
-      <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">
-        {node|short}
-      </a>
-    </td>
-  </tr>'
-bookmarks = ../paper/bookmarks.tmpl
-bookmarkentry = '
-  <tr class="tagEntry">
-    <td>
-      <a href="{url|urlescape}rev/{bookmark|revescape}{sessionvars%urlparameter}">
-        {bookmark|escape}
-      </a>
-    </td>
-    <td class="node">
-      <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">
-        {node|short}
-      </a>
-    </td>
-  </tr>'
-branches = ../paper/branches.tmpl
-branchentry = '
-  <tr class="tagEntry">
-    <td>
-      <a href="{url|urlescape}shortlog/{branch|revescape}{sessionvars%urlparameter}" class="{status}">
-        {branch|escape}
-      </a>
-    </td>
-    <td class="node">
-      <a href="{url|urlescape}shortlog/{node|short}{sessionvars%urlparameter}" class="{status}">
-        {node|short}
-      </a>
-    </td>
-  </tr>'
-changelogtag = '<span class="tag">{name|escape}</span> '
-changesettag = '<span class="tag">{tag|escape}</span> '
-changesetbookmark = '<span class="tag">{bookmark|escape}</span> '
-changelogbranchhead = '<span class="branchhead">{name|escape}</span> '
-changelogbranchname = '<span class="branchname">{name|escape}</span> '
-
-filediffparent = '
-  <tr>
-    <th class="parent">parent {rev}:</th>
-    <td class="parent"><a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td>
-  </tr>'
-filelogparent = '
-  <tr>
-    <th>parent {rev}:</th>
-    <td><a href="{url|urlescape}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td>
-  </tr>'
-filediffchild = '
-  <tr>
-    <th class="child">child {rev}:</th>
-    <td class="child"><a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>
-  </td>
-  </tr>'
-filelogchild = '
-  <tr>
-    <th>child {rev}:</th>
-    <td><a href="{url|urlescape}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td>
-  </tr>'
-
-indexentry = '
-  <tr>
-    <td><a href="{url|urlescape}{sessionvars%urlparameter}">{name|escape}</a></td>
-    <td>{description}</td>
-    <td>{contact|obfuscate}</td>
-    <td class="age">{lastchange|rfc822date}</td>
-    <td class="indexlinks">{archives%indexarchiveentry}</td>
-  </tr>\n'
-indexarchiveentry = '<a href="{url|urlescape}archive/{node|short}{extension|urlescape}">&nbsp;&darr;{type|escape}</a>'
 index = ../paper/index.tmpl
-archiveentry = '
-  <li>
-    <a href="{url|urlescape}archive/{symrev}{extension|urlescape}{ifeq(path,'/','',path|urlescape)}">{type|escape}</a>
-  </li>'
 notfound = ../paper/notfound.tmpl
 error = ../paper/error.tmpl
-urlparameter = '{separator}{name}={value|urlescape}'
-hiddenformentry = '<input type="hidden" name="{name}" value="{value|escape}" />'
-breadcrumb = '&gt; <a href="{url|urlescape}">{name|escape}</a> '
-
-searchhint = 'Find changesets by keywords (author, files, the commit message), revision
-  number or hash, or <a href="{url|urlescape}help/revsets">revset expression</a>.'
--- a/mercurial/templates/gitweb/changelog.tmpl	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/gitweb/changelog.tmpl	Tue Oct 20 15:59:10 2015 -0500
@@ -23,7 +23,7 @@
 <a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
 <a href="{url|urlescape}shortlog/{symrev}{sessionvars%urlparameter}">shortlog</a> |
 changelog |
-<a href="{url|urlescape}graph{sessionvars%urlparameter}">graph</a> |
+<a href="{url|urlescape}graph/{symrev}{sessionvars%urlparameter}">graph</a> |
 <a href="{url|urlescape}tags{sessionvars%urlparameter}">tags</a> |
 <a href="{url|urlescape}bookmarks{sessionvars%urlparameter}">bookmarks</a> |
 <a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a> |
--- a/mercurial/templates/gitweb/changeset.tmpl	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/gitweb/changeset.tmpl	Tue Oct 20 15:59:10 2015 -0500
@@ -16,7 +16,7 @@
 <a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
 <a href="{url|urlescape}shortlog/{symrev}{sessionvars%urlparameter}">shortlog</a> |
 <a href="{url|urlescape}log/{symrev}{sessionvars%urlparameter}">changelog</a> |
-<a href="{url|urlescape}graph{sessionvars%urlparameter}">graph</a> |
+<a href="{url|urlescape}graph/{symrev}{sessionvars%urlparameter}">graph</a> |
 <a href="{url|urlescape}tags{sessionvars%urlparameter}">tags</a> |
 <a href="{url|urlescape}bookmarks{sessionvars%urlparameter}">bookmarks</a> |
 <a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a> |
@@ -52,6 +52,6 @@
 {files}
 </table></div>
 
-<div class="page_body">{diff}</div>
+<div class="page_body diffblocks">{diff}</div>
 
 {footer}
--- a/mercurial/templates/gitweb/filerevision.tmpl	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/gitweb/filerevision.tmpl	Tue Oct 20 15:59:10 2015 -0500
@@ -64,7 +64,7 @@
 </div>
 
 <div class="page_body">
-{text%fileline}
+<pre class="sourcelines stripes">{text%fileline}</pre>
 </div>
 
 {footer}
--- a/mercurial/templates/gitweb/graph.tmpl	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/gitweb/graph.tmpl	Tue Oct 20 15:59:10 2015 -0500
@@ -21,7 +21,7 @@
 </form>
 <div class="page_nav">
 <a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
-<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
+<a href="{url|urlescape}shortlog/{symrev}{sessionvars%urlparameter}">shortlog</a> |
 <a href="{url|urlescape}log/{symrev}{sessionvars%urlparameter}">changelog</a> |
 graph |
 <a href="{url|urlescape}tags{sessionvars%urlparameter}">tags</a> |
--- a/mercurial/templates/gitweb/map	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/gitweb/map	Tue Oct 20 15:59:10 2015 -0500
@@ -93,31 +93,33 @@
 filecomparison = filecomparison.tmpl
 filelog = filelog.tmpl
 fileline = '
-  <div style="font-family:monospace" class="parity{parity}">
-    <pre><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</pre>
-  </div>'
+  <a href="#{lineid}"></a><span id="{lineid}">{strip(line|escape, '\r\n')}</span>'
 annotateline = '
-  <tr style="font-family:monospace" class="parity{parity}">
+  <tr id="{lineid}" style="font-family:monospace" class="parity{parity}">
     <td class="linenr" style="text-align: right;">
       <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}"
          title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a>
     </td>
-    <td><pre><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a></pre></td>
+    <td><pre><a class="linenr" href="#{lineid}">{linenumber}</a></pre></td>
     <td><pre>{line|escape}</pre></td>
   </tr>'
-difflineplus = '<span class="difflineplus"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
-difflineminus = '<span class="difflineminus"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
-difflineat = '<span class="difflineat"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
-diffline = '<a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}'
+difflineplus = '
+  <a href="#{lineid}"></a><span id="{lineid}" class="difflineplus">{strip(line|escape, '\r\n')}</span>'
+difflineminus = '
+  <a href="#{lineid}"></a><span id="{lineid}" class="difflineminus">{strip(line|escape, '\r\n')}</span>'
+difflineat = '
+  <a href="#{lineid}"></a><span id="{lineid}" class="difflineat">{strip(line|escape, '\r\n')}</span>'
+diffline = '
+  <a href="#{lineid}"></a><span id="{lineid}">{strip(line|escape, '\r\n')}</span>'
 
 comparisonblock ='
   <tbody class="block">
   {lines}
   </tbody>'
 comparisonline = '
-  <tr style="font-family:monospace">
-    <td class="{type}"><pre><a class="linenr" href="#{lineid}" id="{lineid}">{leftlinenumber}</a> {leftline|escape}</pre></td>
-    <td class="{type}"><pre><a class="linenr" href="#{lineid}" id="{lineid}">{rightlinenumber}</a> {rightline|escape}</pre></td>
+  <tr id="{lineid}" style="font-family:monospace">
+    <td class="{type}"><pre><a class="linenr" href="#{lineid}">{leftlinenumber}</a> {leftline|escape}</pre></td>
+    <td class="{type}"><pre><a class="linenr" href="#{lineid}">{rightlinenumber}</a> {rightline|escape}</pre></td>
   </tr>'
 
 changelogparent = '
@@ -193,7 +195,7 @@
 tagentry = '
   <tr class="parity{parity}">
     <td class="age"><i class="age">{date|rfc822date}</i></td>
-    <td><a class="list" href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}"><b>{tag|escape}</b></a></td>
+    <td><a class="list" href="{url|urlescape}rev/{tag|revescape}{sessionvars%urlparameter}"><b>{tag|escape}</b></a></td>
     <td class="link">
       <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">changeset</a> |
       <a href="{url|urlescape}log/{node|short}{sessionvars%urlparameter}">changelog</a> |
@@ -204,7 +206,7 @@
 bookmarkentry = '
   <tr class="parity{parity}">
     <td class="age"><i class="age">{date|rfc822date}</i></td>
-    <td><a class="list" href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}"><b>{bookmark|escape}</b></a></td>
+    <td><a class="list" href="{url|urlescape}rev/{bookmark|revescape}{sessionvars%urlparameter}"><b>{bookmark|escape}</b></a></td>
     <td class="link">
       <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">changeset</a> |
       <a href="{url|urlescape}log/{node|short}{sessionvars%urlparameter}">changelog</a> |
@@ -215,15 +217,14 @@
 branchentry = '
   <tr class="parity{parity}">
     <td class="age"><i class="age">{date|rfc822date}</i></td>
-    <td><a class="list" href="{url|urlescape}shortlog/{node|short}{sessionvars%urlparameter}"><b>{node|short}</b></a></td>
-    <td class="{status}">{branch|escape}</td>
+    <td class="{status}"><a class="list" href="{url|urlescape}shortlog/{branch|revescape}{sessionvars%urlparameter}"><b>{branch|escape}</b></a></td>
     <td class="link">
       <a href="{url|urlescape}changeset/{node|short}{sessionvars%urlparameter}">changeset</a> |
       <a href="{url|urlescape}log/{node|short}{sessionvars%urlparameter}">changelog</a> |
       <a href="{url|urlescape}file/{node|short}{sessionvars%urlparameter}">files</a>
     </td>
   </tr>'
-diffblock = '<pre>{lines}</pre>'
+diffblock = '<div class="diffblock"><pre class="sourcelines">{lines}</pre></div>'
 filediffparent = '
   <tr>
     <td>parent {rev}</td>
@@ -290,6 +291,7 @@
 filelogentry = '
   <tr class="parity{parity}">
     <td class="age"><i class="age">{date|rfc822date}</i></td>
+    <td><i>{author|person}</i></td>
     <td>
       <a class="list" href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">
         <b>{desc|strip|firstline|escape|nonempty}</b>
--- a/mercurial/templates/gitweb/shortlog.tmpl	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/gitweb/shortlog.tmpl	Tue Oct 20 15:59:10 2015 -0500
@@ -22,7 +22,7 @@
 <a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
 shortlog |
 <a href="{url|urlescape}log/{symrev}{sessionvars%urlparameter}">changelog</a> |
-<a href="{url|urlescape}graph{sessionvars%urlparameter}">graph</a> |
+<a href="{url|urlescape}graph/{symrev}{sessionvars%urlparameter}">graph</a> |
 <a href="{url|urlescape}tags{sessionvars%urlparameter}">tags</a> |
 <a href="{url|urlescape}bookmarks{sessionvars%urlparameter}">bookmarks</a> |
 <a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a> |
--- a/mercurial/templates/map-cmdline.xml	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/map-cmdline.xml	Tue Oct 20 15:59:10 2015 -0500
@@ -1,5 +1,5 @@
-header = '<?xml version="1.0"?>\n<log>\n'
-footer = '</log>\n'
+docheader = '<?xml version="1.0"?>\n<log>\n'
+docfooter = '</log>\n'
 
 changeset = '<logentry revision="{rev}" node="{node}">\n{branches}{bookmarks}{tags}{parents}<author email="{author|email|xmlescape}">{author|person|xmlescape}</author>\n<date>{date|rfc3339date}</date>\n<msg xml:space="preserve">{desc|xmlescape}</msg>\n</logentry>\n'
 changeset_verbose = '<logentry revision="{rev}" node="{node}">\n{branches}{bookmarks}{tags}{parents}<author email="{author|email|xmlescape}">{author|person|xmlescape}</author>\n<date>{date|rfc3339date}</date>\n<msg xml:space="preserve">{desc|xmlescape}</msg>\n<paths>\n{file_adds}{file_dels}{file_mods}</paths>\n{file_copies}</logentry>\n'
--- a/mercurial/templates/monoblue/changeset.tmpl	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/monoblue/changeset.tmpl	Tue Oct 20 15:59:10 2015 -0500
@@ -59,7 +59,7 @@
     {files}
     </table>
 
-    <div class="diff">
+    <div class="diff diffblocks">
     {diff}
     </div>
 
--- a/mercurial/templates/monoblue/fileannotate.tmpl	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/monoblue/fileannotate.tmpl	Tue Oct 20 15:59:10 2015 -0500
@@ -32,6 +32,7 @@
 
     <ul class="submenu">
         <li><a href="{url|urlescape}file/{symrev}/{file|urlescape}{sessionvars%urlparameter}">file</a></li>
+        <li><a href="{url|urlescape}file/tip/{file|urlescape}{sessionvars%urlparameter}">latest</a></li>
         <li><a href="{url|urlescape}log/{symrev}/{file|urlescape}{sessionvars%urlparameter}">revisions</a></li>
         <li class="current">annotate</li>
         <li><a href="{url|urlescape}diff/{symrev}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li>
--- a/mercurial/templates/monoblue/filecomparison.tmpl	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/monoblue/filecomparison.tmpl	Tue Oct 20 15:59:10 2015 -0500
@@ -32,6 +32,7 @@
 
     <ul class="submenu">
         <li><a href="{url|urlescape}file/{symrev}/{file|urlescape}{sessionvars%urlparameter}">file</a></li>
+        <li><a href="{url|urlescape}file/tip/{file|urlescape}{sessionvars%urlparameter}">latest</a></li>
         <li><a href="{url|urlescape}log/{symrev}/{file|urlescape}{sessionvars%urlparameter}">revisions</a></li>
         <li><a href="{url|urlescape}annotate/{symrev}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li>
         <li><a href="{url|urlescape}diff/{symrev}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li>
--- a/mercurial/templates/monoblue/filediff.tmpl	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/monoblue/filediff.tmpl	Tue Oct 20 15:59:10 2015 -0500
@@ -32,6 +32,7 @@
 
     <ul class="submenu">
         <li><a href="{url|urlescape}file/{symrev}/{file|urlescape}{sessionvars%urlparameter}">file</a></li>
+        <li><a href="{url|urlescape}file/tip/{file|urlescape}{sessionvars%urlparameter}">latest</a></li>
         <li><a href="{url|urlescape}log/{symrev}/{file|urlescape}{sessionvars%urlparameter}">revisions</a></li>
         <li><a href="{url|urlescape}annotate/{symrev}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li>
         <li class="current">diff</li>
@@ -50,7 +51,7 @@
         {child%filediffchild}
     </dl>
 
-    <div class="diff">
+    <div class="diff diffblocks">
     {diff}
     </div>
 
--- a/mercurial/templates/monoblue/filerevision.tmpl	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/monoblue/filerevision.tmpl	Tue Oct 20 15:59:10 2015 -0500
@@ -32,6 +32,7 @@
 
     <ul class="submenu">
         <li class="current">file</li>
+        <li><a href="{url|urlescape}file/tip/{file|urlescape}{sessionvars%urlparameter}">latest</a></li>
         <li><a href="{url|urlescape}log/{symrev}/{file|urlescape}{sessionvars%urlparameter}">revisions</a></li>
         <li><a href="{url|urlescape}annotate/{symrev}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li>
         <li><a href="{url|urlescape}diff/{symrev}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li>
@@ -60,7 +61,7 @@
     <p class="description">{desc|strip|escape|websub|addbreaks|nonempty}</p>
 
     <div class="source">
-    {text%fileline}
+        <pre class="sourcelines stripes">{text%fileline}</pre>
     </div>
 
 {footer}
--- a/mercurial/templates/monoblue/footer.tmpl	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/monoblue/footer.tmpl	Tue Oct 20 15:59:10 2015 -0500
@@ -12,11 +12,6 @@
         <p><a href="{logourl}" title="Mercurial"><img src="{staticurl|urlescape}{logoimg}" width=75 height=90 border=0 alt="mercurial" /></a></p>
     </div>
 
-    <div id="corner-top-left"></div>
-    <div id="corner-top-right"></div>
-    <div id="corner-bottom-left"></div>
-    <div id="corner-bottom-right"></div>
-
 </div>
 
 </body>
--- a/mercurial/templates/monoblue/index.tmpl	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/monoblue/index.tmpl	Tue Oct 20 15:59:10 2015 -0500
@@ -29,11 +29,6 @@
         <p><a href="{logourl}" title="Mercurial"><img src="{staticurl|urlescape}{logoimg}" width=75 height=90 border=0 alt="mercurial"></a></p>
     </div>
 
-    <div id="corner-top-left"></div>
-    <div id="corner-top-right"></div>
-    <div id="corner-bottom-left"></div>
-    <div id="corner-bottom-right"></div>
-
 </div>
 <script type="text/javascript">process_dates()</script>
 </body>
--- a/mercurial/templates/monoblue/map	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/monoblue/map	Tue Oct 20 15:59:10 2015 -0500
@@ -89,33 +89,35 @@
 filecomparison = filecomparison.tmpl
 filelog = filelog.tmpl
 fileline = '
-  <div style="font-family:monospace" class="parity{parity}">
-    <pre><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</pre>
-  </div>'
+  <a href="#{lineid}"></a><span id="{lineid}">{strip(line|escape, '\r\n')}</span>'
 annotateline = '
-  <tr class="parity{parity}">
+  <tr id="{lineid}" class="parity{parity}">
     <td class="linenr">
       <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}"
          title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a>
     </td>
     <td class="lineno">
-      <a href="#{lineid}" id="{lineid}">{linenumber}</a>
+      <a href="#{lineid}">{linenumber}</a>
     </td>
     <td class="source">{line|escape}</td>
   </tr>'
-difflineplus = '<span class="difflineplus"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
-difflineminus = '<span class="difflineminus"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
-difflineat = '<span class="difflineat"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
-diffline = '<span><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
+difflineplus = '
+  <a href="#{lineid}"></a><span id="{lineid}" class="difflineplus">{strip(line|escape, '\r\n')}</span>'
+difflineminus = '
+  <a href="#{lineid}"></a><span id="{lineid}" class="difflineminus">{strip(line|escape, '\r\n')}</span>'
+difflineat = '
+  <a href="#{lineid}"></a><span id="{lineid}" class="difflineat">{strip(line|escape, '\r\n')}</span>'
+diffline = '
+  <a href="#{lineid}"></a><span id="{lineid}">{strip(line|escape, '\r\n')}</span>'
 
 comparisonblock ='
   <tbody class="block">
   {lines}
   </tbody>'
 comparisonline = '
-  <tr>
-    <td class="source {type}"><a class="linenr" href="#{lineid}" id="{lineid}">{leftlinenumber}</a> {leftline|escape}</td>
-    <td class="source {type}"><a class="linenr" href="#{lineid}" id="{lineid}">{rightlinenumber}</a> {rightline|escape}</td>
+  <tr id="{lineid}">
+    <td class="source {type}"><a class="linenr" href="#{lineid}">{leftlinenumber}</a> {leftline|escape}</td>
+    <td class="source {type}"><a class="linenr" href="#{lineid}">{rightlinenumber}</a> {rightline|escape}</td>
   </tr>'
 
 changesetlink = '<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>'
@@ -171,7 +173,7 @@
 tagentry = '
   <tr class="parity{parity}">
     <td class="nowrap age">{date|rfc822date}</td>
-    <td><a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{tag|escape}</a></td>
+    <td><a href="{url|urlescape}rev/{tag|revescape}{sessionvars%urlparameter}">{tag|escape}</a></td>
     <td class="nowrap">
       <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">changeset</a> |
       <a href="{url|urlescape}log/{node|short}{sessionvars%urlparameter}">changelog</a> |
@@ -182,7 +184,7 @@
 bookmarkentry = '
   <tr class="parity{parity}">
     <td class="nowrap age">{date|rfc822date}</td>
-    <td><a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{bookmark|escape}</a></td>
+    <td><a href="{url|urlescape}rev/{bookmark|revescape}{sessionvars%urlparameter}">{bookmark|escape}</a></td>
     <td class="nowrap">
       <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">changeset</a> |
       <a href="{url|urlescape}log/{node|short}{sessionvars%urlparameter}">changelog</a> |
@@ -193,15 +195,14 @@
 branchentry = '
   <tr class="parity{parity}">
     <td class="nowrap age">{date|rfc822date}</td>
-    <td><a href="{url|urlescape}shortlog/{node|short}{sessionvars%urlparameter}">{node|short}</a></td>
-    <td class="{status}">{branch|escape}</td>
+    <td class="{status}"><a href="{url|urlescape}shortlog/{branch|revescape}{sessionvars%urlparameter}">{branch|escape}</a></td>
     <td class="nowrap">
       <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">changeset</a> |
       <a href="{url|urlescape}log/{node|short}{sessionvars%urlparameter}">changelog</a> |
       <a href="{url|urlescape}file/{node|short}{sessionvars%urlparameter}">files</a>
     </td>
   </tr>'
-diffblock = '<pre>{lines}</pre>'
+diffblock = '<div class="diffblock"><pre class="sourcelines">{lines}</pre></div>'
 filediffparent = '
   <dt>parent {rev}</dt>
   <dd><a href="{url|urlescape}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></dd>'
@@ -247,6 +248,7 @@
 filelogentry = '
   <tr class="parity{parity}">
     <td class="nowrap age">{date|rfc822date}</td>
+    <td>{author|person}</td>
     <td>
       <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">
         {desc|strip|firstline|escape|nonempty}
@@ -254,7 +256,9 @@
       </a>
     </td>
     <td class="nowrap">
-      <a href="{url|urlescape}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a>&nbsp;|&nbsp;<a href="{url|urlescape}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a>&nbsp;|&nbsp;<a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a>
+      <a href="{url|urlescape}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> |
+      <a href="{url|urlescape}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> |
+      <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a>
       {rename%filelogrename}
     </td>
   </tr>'
--- a/mercurial/templates/monoblue/summary.tmpl	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/monoblue/summary.tmpl	Tue Oct 20 15:59:10 2015 -0500
@@ -26,6 +26,7 @@
             <li><a href="{url|urlescape}bookmarks{sessionvars%urlparameter}">bookmarks</a></li>
             <li><a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a></li>
             <li><a href="{url|urlescape}file{sessionvars%urlparameter}">files</a></li>
+            {archives%archiveentry}
             <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
         </ul>
     </div>
--- a/mercurial/templates/paper/filerevision.tmpl	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/paper/filerevision.tmpl	Tue Oct 20 15:59:10 2015 -0500
@@ -71,8 +71,7 @@
 <div class="overflow">
 <div class="sourcefirst linewraptoggle">line wrap: <a class="linewraplink" href="javascript:toggleLinewrap()">on</a></div>
 <div class="sourcefirst"> line source</div>
-<pre class="sourcelines stripes4 wrap">{text%fileline}</pre>
-<div class="sourcelast"></div>
+<pre class="sourcelines stripes4 wrap bottomline">{text%fileline}</pre>
 </div>
 </div>
 </div>
--- a/mercurial/templates/rss/error.tmpl	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/rss/error.tmpl	Tue Oct 20 15:59:10 2015 -0500
@@ -4,7 +4,7 @@
     <item>
       <title>Error</title>
       <description>{error|escape}</description>
-      <guid>http://mercurial.selenic.com/#error</guid>
+      <guid>https://mercurial-scm.org/#error</guid>
     </item>
   </channel>
 </rss>
--- a/mercurial/templates/static/style-coal.css	Thu Oct 08 23:24:38 2015 +0900
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,368 +0,0 @@
-body {
-  margin: 0;
-  padding: 0;
-  background: black url(background.png) repeat-x;
-  font-family: sans-serif;
-}
-
-.container {
-  padding-right: 150px;
-}
-
-.main {
-  position: relative;
-  background: white;
-  padding: 2em;
-  border-right: 15px solid black;
-  border-bottom: 15px solid black;
-}
-
-#.main {
-  width: 98%;
-}
-
-.overflow {
-  width: 100%;
-  overflow: auto;
-}
-
-.menu {
-  background: #999;
-  padding: 10px;
-  width: 75px;
-  margin: 0;
-  font-size: 80%;
-  text-align: left;
-  position: fixed;
-  top: 27px;
-  left: auto;
-  right: 27px;
-}
-
-#.menu {
-  position: absolute !important;
-  top:expression(eval(document.body.scrollTop + 27));
-}
-
-.menu ul {
-  list-style: none;
-  padding: 0;
-  margin: 10px 0 0 0;
-}
-
-.menu li {
-  margin-bottom: 3px;
-  padding: 2px 4px;
-  background: white;
-  color: black;
-  font-weight: normal;
-}
-
-.menu li.active {
-  background: black;
-  color: white;
-}
-
-.menu img {
-  width: 75px;
-  height: 90px;
-  border: 0;
-}
-
-.menu a { color: black; display: block; }
-
-.search {
-  position: absolute;
-  top: .7em;
-  right: 2em;
-}
-
-form.search div#hint {
-  display: none;
-  position: absolute;
-  top: 40px;
-  right: 0px;
-  width: 190px;
-  padding: 5px;
-  background: #ffc;
-  font-size: 70%;
-  border: 1px solid yellow;
-  -moz-border-radius: 5px; /* this works only in camino/firefox */
-  -webkit-border-radius: 5px; /* this is just for Safari */
-}
-
-form.search:hover div#hint { display: block; }
-
-a { text-decoration:none; }
-.age { white-space:nowrap; }
-.date { white-space:nowrap; }
-.indexlinks { white-space:nowrap; }
-.parity0,
-.stripes4 > :nth-child(4n+1),
-.stripes2 > :nth-child(2n+1) { background-color: #f0f0f0; }
-.parity1,
-.stripes4 > :nth-child(4n+3),
-.stripes2 > :nth-child(2n+2) { background-color: white; }
-.plusline { color: green; }
-.minusline { color: #dc143c; } /* crimson */
-.atline { color: purple; }
-
-.diffstat-file {
-  white-space: nowrap;
-  font-size: 90%;
-}
-.diffstat-total {
-  white-space: nowrap;
-  font-size: 90%;
-}
-.diffstat-graph {
-  width: 100%;
-}
-.diffstat-add {
-  background-color: green;
-  float: left;
-}
-.diffstat-remove {
-  background-color: red;
-  float: left;
-}
-
-.navigate {
-  text-align: right;
-  font-size: 60%;
-  margin: 1em 0;
-}
-
-.tag {
-  color: #999;
-  font-size: 70%;
-  font-weight: normal;
-  margin-left: .5em;
-  vertical-align: baseline;
-}
-
-.branchhead {
-  color: #000;
-  font-size: 80%;
-  font-weight: normal;
-  margin-left: .5em;
-  vertical-align: baseline;
-}
-
-ul#graphnodes .branchhead {
-  font-size: 75%;
-}
-
-.branchname {
-  color: #000;
-  font-size: 60%;
-  font-weight: normal;
-  margin-left: .5em;
-  vertical-align: baseline;
-}
-
-h3 .branchname {
-  font-size: 80%;
-}
-
-/* Common */
-pre { margin: 0; }
-
-h2 { font-size: 120%; border-bottom: 1px solid #999; }
-h2 a { color: #000; }
-h3 {
-  margin-top: -.7em;
-  font-size: 100%;
-}
-
-/* log and tags tables */
-.bigtable {
-  border-bottom: 1px solid #999;
-  border-collapse: collapse;
-  font-size: 90%;
-  width: 100%;
-  font-weight: normal;
-  text-align: left;
-}
-
-.bigtable td {
-  vertical-align: top;
-}
-
-.bigtable th {
-  padding: 1px 4px;
-  border-bottom: 1px solid #999;
-}
-.bigtable tr { border: none; }
-.bigtable .age { width: 6em; }
-.bigtable .author { width: 15em; }
-.bigtable .description { }
-.bigtable .description .base { font-size: 70%; float: right; line-height: 1.66; }
-.bigtable .node { width: 5em; font-family: monospace;}
-.bigtable .lineno { width: 2em; text-align: right;}
-.bigtable .lineno a { color: #999; font-size: smaller; font-family: monospace;}
-.bigtable .permissions { width: 8em; text-align: left;}
-.bigtable .size { width: 5em; text-align: right; }
-.bigtable .annotate { text-align: right; }
-.bigtable td.annotate { font-size: smaller; }
-.bigtable td.source { font-size: inherit; }
-
-.source, .sourcefirst, .sourcelast {
-  font-family: monospace;
-  white-space: pre;
-  padding: 1px 4px;
-  font-size: 90%;
-}
-.sourcefirst { border-bottom: 1px solid #999; font-weight: bold; }
-.sourcelast { border-top: 1px solid #999; }
-.source a { color: #999; font-size: smaller; font-family: monospace;}
-.bottomline { border-bottom: 1px solid #999; }
-
-.sourcelines > div {
-  display: inline-block;
-  width: 100%;
-  padding: 1px 0px;
-  counter-increment: lineno;
-}
-
-.fileline { font-family: monospace; }
-.fileline img { border: 0; }
-
-.tagEntry .closed { color: #99f; }
-
-/* Changeset entry */
-#changesetEntry {
-  border-collapse: collapse;
-  font-size: 90%;
-  width: 100%;
-  margin-bottom: 1em;
-}
-
-#changesetEntry th {
-  padding: 1px 4px;
-  width: 4em;
-  text-align: right;
-  font-weight: normal;
-  color: #999;
-  margin-right: .5em;
-  vertical-align: top;
-}
-
-div.description {
-  border-left: 3px solid #999;
-  margin: 1em 0 1em 0;
-  padding: .3em;
-  white-space: pre;
-  font-family: monospace;
-}
-
-/* Graph */
-div#wrapper {
-	position: relative;
-	border-top: 1px solid black;
-	border-bottom: 1px solid black;
-	margin: 0;
-	padding: 0;
-}
-
-canvas {
-	position: absolute;
-	z-index: 5;
-	top: -0.7em;
-	margin: 0;
-}
-
-ul#graphnodes {
-	position: absolute;
-	z-index: 10;
-	top: -1.0em;
-	list-style: none inside none;
-	padding: 0;
-}
-
-ul#nodebgs {
-	list-style: none inside none;
-	padding: 0;
-	margin: 0;
-	top: -0.7em;
-}
-
-ul#graphnodes li, ul#nodebgs li {
-	height: 39px;
-}
-
-ul#graphnodes li .info {
-	display: block;
-	font-size: 70%;
-	position: relative;
-	top: -3px;
-}
-
-/* Comparison */
-.legend {
-    padding: 1.5% 0 1.5% 0;
-}
-
-.legendinfo {
-    border: 1px solid #999;
-    font-size: 80%;
-    text-align: center;
-    padding: 0.5%;
-}
-
-.equal {
-    background-color: #ffffff;
-}
-
-.delete {
-    background-color: #faa;
-    color: #333;
-}
-
-.insert {
-    background-color: #ffa;
-}
-
-.replace {
-    background-color: #e8e8e8;
-}
-
-.header {
-    text-align: center;
-}
-
-.block {
-    border-top: 1px solid #999;
-}
-
-.breadcrumb {
-    color: gray;
-}
-
-.breadcrumb a {
-    color: blue;
-}
-
-.scroll-loading {
-    -webkit-animation: change_color 1s linear 0s infinite alternate;
-    -moz-animation: change_color 1s linear 0s infinite alternate;
-    -o-animation: change_color 1s linear 0s infinite alternate;
-    animation: change_color 1s linear 0s infinite alternate;
-}
-
-@-webkit-keyframes change_color {
-  from { background-color: #A0CEFF; } to {  }
-}
-@-moz-keyframes change_color {
-  from { background-color: #A0CEFF; } to {  }
-}
-@-o-keyframes change_color {
-  from { background-color: #A0CEFF; } to {  }
-}
-@keyframes change_color {
-  from { background-color: #A0CEFF; } to {  }
-}
-
-.scroll-loading-error {
-    background-color: #FFCCCC !important;
-}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/templates/static/style-extra-coal.css	Tue Oct 20 15:59:10 2015 -0500
@@ -0,0 +1,46 @@
+body {
+    background: black url('background.png') repeat-x;
+}
+
+.container {
+    padding-left: 0;
+    padding-right: 150px;
+}
+
+.main {
+    padding: 2em;
+    border-right: 15px solid black;
+    border-bottom: 15px solid black;
+}
+
+.menu {
+    background: #999;
+    padding: 10px;
+    width: 75px;
+    position: fixed;
+    top: 27px;
+    left: auto;
+    right: 27px;
+}
+
+.menu ul {
+    border-left: 0;
+}
+
+.menu li.active {
+    font-weight: normal;
+    background: black;
+    color: white;
+}
+
+.menu li.active a {
+    color: white;
+}
+
+h3 {
+    margin-top: -.7em;
+}
+
+div.description {
+    border-left-width: 3px;
+}
--- a/mercurial/templates/static/style-gitweb.css	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/static/style-gitweb.css	Tue Oct 20 15:59:10 2015 -0500
@@ -1,4 +1,4 @@
-body { font-family: sans-serif; font-size: 12px; border:solid #d9d8d1; border-width:1px; margin:10px; }
+body { font-family: sans-serif; font-size: 12px; border:solid #d9d8d1; border-width:1px; margin:10px; background: white; color: black; }
 a { color:#0000cc; }
 a:hover, a:visited, a:active { color:#880000; }
 div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; }
@@ -29,9 +29,12 @@
 a.list:hover { text-decoration:underline; color:#880000; }
 table { padding:8px 4px; }
 th { padding:2px 5px; font-size:12px; text-align:left; }
-tr.light:hover, .parity0:hover { background-color:#edece6; }
-tr.dark, .parity1 { background-color:#f6f6f0; }
-tr.dark:hover, .parity1:hover { background-color:#edece6; }
+tr.dark, .parity1, pre.sourcelines.stripes > :nth-child(4n+4) { background-color:#f6f6f0; }
+tr.light:hover, .parity0:hover, tr.dark:hover, .parity1:hover,
+pre.sourcelines.stripes > :nth-child(4n+2):hover,
+pre.sourcelines.stripes > :nth-child(4n+4):hover,
+pre.sourcelines.stripes > :nth-child(4n+1):hover + :nth-child(4n+2),
+pre.sourcelines.stripes > :nth-child(4n+3):hover + :nth-child(4n+4) { background-color:#edece6; }
 td { padding:2px 5px; font-size:12px; vertical-align:top; }
 td.closed { background-color: #99f; }
 td.link { padding:2px 5px; font-family:sans-serif; font-size:10px; }
@@ -87,6 +90,43 @@
 span.difflineplus { color:#008800; }
 span.difflineminus { color:#cc0000; }
 span.difflineat { color:#990099; }
+div.diffblocks { counter-reset: lineno; }
+div.diffblock { counter-increment: lineno; }
+pre.sourcelines { position: relative; counter-reset: lineno; }
+pre.sourcelines > span {
+	display: inline-block;
+	box-sizing: border-box;
+	width: 100%;
+	padding: 0 0 0 5em;
+	counter-increment: lineno;
+	vertical-align: top;
+}
+pre.sourcelines > span:before {
+	-moz-user-select: -moz-none;
+	-khtml-user-select: none;
+	-webkit-user-select: none;
+	-ms-user-select: none;
+	user-select: none;
+	display: inline-block;
+	margin-left: -5em;
+	width: 4em;
+	color: #999;
+	text-align: right;
+	content: counters(lineno,".");
+	float: left;
+}
+pre.sourcelines > a {
+	display: inline-block;
+	position: absolute;
+	left: 0px;
+	width: 4em;
+	height: 1em;
+}
+tr:target td,
+pre.sourcelines > span:target,
+pre.sourcelines.stripes > span:target {
+	background-color: #bfdfff;
+}
 
 /* Graph */
 div#wrapper {
@@ -194,3 +234,7 @@
 .scroll-loading-error {
     background-color: #FFCCCC !important;
 }
+
+#doc {
+    margin: 0 8px;
+}
--- a/mercurial/templates/static/style-monoblue.css	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/static/style-monoblue.css	Tue Oct 20 15:59:10 2015 -0500
@@ -255,34 +255,69 @@
   font-family: monospace;
   white-space: pre;
   font-size: 1.2em;
-  padding: 3px 0;
 }
+div.diffblocks { counter-reset: lineno; }
+div.diffblock { counter-increment: lineno; }
 span.difflineplus { color:#008800; }
 span.difflineminus { color:#cc0000; }
 span.difflineat { color:#990099; }
 
+pre.sourcelines {
+    position: relative;
+    counter-reset: lineno;
+    font-size: 1.2em;
+}
+pre.sourcelines > span {
+    display: inline-block;
+    box-sizing: border-box;
+    width: 100%;
+    padding: 0 0 0 5em;
+    counter-increment: lineno;
+    vertical-align: top;
+}
+div.source > pre.sourcelines > span {
+    padding: 1px 1px 1px 5em;
+}
+pre.sourcelines > span:before {
+    -moz-user-select: -moz-none;
+    -khtml-user-select: none;
+    -webkit-user-select: none;
+    -ms-user-select: none;
+    user-select: none;
+    display: inline-block;
+    margin-left: -5em;
+    width: 4em;
+    color: #999;
+    text-align: right;
+    content: counters(lineno,".");
+    float: left;
+}
+pre.sourcelines > a {
+    display: inline-block;
+    position: absolute;
+    left: 0px;
+    width: 4em;
+    height: 1em;
+    padding: 0.15em;
+}
+pre.sourcelines.stripes > :nth-child(4n+2) { background-color: #F1F6F7; }
+pre.sourcelines.stripes > :nth-child(4n+4) { background-color: #FFFFFF; }
+pre.sourcelines.stripes > :nth-child(4n+2):hover,
+pre.sourcelines.stripes > :nth-child(4n+4):hover,
+pre.sourcelines.stripes > :nth-child(4n+1):hover + :nth-child(4n+2),
+pre.sourcelines.stripes > :nth-child(4n+3):hover + :nth-child(4n+4) { background-color: #D5E1E6; }
+
+pre.sourcelines > span:target,
+pre.sourcelines.stripes > span:target {
+    background-color: #bfdfff;
+}
+
 td.source {
   white-space: pre;
-  font-family: monospace;
   margin: 10px 30px 0;
   font-size: 1.2em;
   font-family: monospace;
 }
-  div.source div.parity0,
-  div.source div.parity1 {
-    padding: 1px;
-    font-size: 1.2em;
-  }
-  div.source div.parity0 {
-    background: #F1F6F7;
-  }
-  div.source div.parity1 {
-    background: #FFFFFF;
-  }
-div.parity0:hover,
-div.parity1:hover {
-  background: #D5E1E6;
-}
 .linenr {
   color: #999;
   text-align: right;
@@ -311,44 +346,6 @@
 div#powered-by a:hover {
   text-decoration: underline;
 }
-/*
-div#monoblue-corner-top-left {
-  position: absolute;
-  top: 0;
-  left: 0;
-  width: 10px;
-  height: 10px;
-  background: url(./monoblue-corner.png) top left no-repeat !important;
-  background: none;
-}
-div#monoblue-corner-top-right {
-  position: absolute;
-  top: 0;
-  right: 0;
-  width: 10px;
-  height: 10px;
-  background: url(./monoblue-corner.png) top right no-repeat !important;
-  background: none;
-}
-div#monoblue-corner-bottom-left {
-  position: absolute;
-  bottom: 0;
-  left: 0;
-  width: 10px;
-  height: 10px;
-  background: url(./monoblue-corner.png) bottom left no-repeat !important;
-  background: none;
-}
-div#monoblue-corner-bottom-right {
-  position: absolute;
-  bottom: 0;
-  right: 0;
-  width: 10px;
-  height: 10px;
-  background: url(./monoblue-corner.png) bottom right no-repeat !important;
-  background: none;
-}
-*/
 /** end of common settings **/
 
 /** summary **/
@@ -553,3 +550,7 @@
 .scroll-loading-error {
     background-color: #FFCCCC !important;
 }
+
+#doc {
+    margin: 0 30px;
+}
--- a/mercurial/templates/static/style-paper.css	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/static/style-paper.css	Tue Oct 20 15:59:10 2015 -0500
@@ -2,6 +2,7 @@
   margin: 0;
   padding: 0;
   background: white;
+  color: black;
   font-family: sans-serif;
 }
 
@@ -88,8 +89,7 @@
   background: #ffc;
   font-size: 70%;
   border: 1px solid yellow;
-  -moz-border-radius: 5px; /* this works only in camino/firefox */
-  -webkit-border-radius: 5px; /* this is just for Safari */
+  border-radius: 5px;
 }
 
 form.search:hover div#hint { display: block; }
@@ -209,14 +209,13 @@
 .bigtable td.annotate { font-size: smaller; }
 .bigtable td.source { font-size: inherit; }
 
-.source, .sourcefirst, .sourcelast {
+.source, .sourcefirst {
   font-family: monospace;
   white-space: pre;
   padding: 1px 4px;
   font-size: 90%;
 }
 .sourcefirst { border-bottom: 1px solid #999; font-weight: bold; }
-.sourcelast { border-top: 1px solid #999; }
 .source a { color: #999; font-size: smaller; font-family: monospace;}
 .bottomline { border-bottom: 1px solid #999; }
 
--- a/mercurial/templates/static/style.css	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/templates/static/style.css	Tue Oct 20 15:59:10 2015 -0500
@@ -2,8 +2,8 @@
 .age { white-space:nowrap; }
 .date { white-space:nowrap; }
 .indexlinks { white-space:nowrap; }
-.parity0 { background-color: #ddd; }
-.parity1 { background-color: #eee; }
+.parity0 { background-color: #ddd; color: #000; }
+.parity1 { background-color: #eee; color: #000; }
 .lineno { width: 60px; color: #aaa; font-size: smaller;
           text-align: right; }
 .plusline { color: green; }
--- a/mercurial/transaction.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/transaction.py	Tue Oct 20 15:59:10 2015 -0500
@@ -11,9 +11,15 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from i18n import _
+from __future__ import absolute_import
+
 import errno
-import error, util
+
+from .i18n import _
+from . import (
+    error,
+    util,
+)
 
 version = 2
 
@@ -65,25 +71,25 @@
                 except (IOError, OSError) as inst:
                     if inst.errno != errno.ENOENT:
                         raise
-        except (IOError, OSError, util.Abort) as inst:
+        except (IOError, OSError, error.Abort) as inst:
             if not c:
                 raise
 
-    opener.unlink(journal)
     backuppath = "%s.backupfiles" % journal
     if opener.exists(backuppath):
         opener.unlink(backuppath)
+    opener.unlink(journal)
     try:
         for f in backupfiles:
             if opener.exists(f):
                 opener.unlink(f)
-    except (IOError, OSError, util.Abort) as inst:
+    except (IOError, OSError, error.Abort) as inst:
         # only pure backup file remains, it is safe to ignore any error
         pass
 
 class transaction(object):
     def __init__(self, report, opener, vfsmap, journalname, undoname=None,
-                 after=None, createmode=None, validator=None):
+                 after=None, createmode=None, validator=None, releasefn=None):
         """Begin a new transaction
 
         Begins a new transaction that allows rolling back writes in the event of
@@ -91,6 +97,7 @@
 
         * `after`: called after the transaction has been committed
         * `createmode`: the mode of the journal file that will be created
+        * `releasefn`: called after releasing (with transaction and result)
         """
         self.count = 1
         self.usages = 1
@@ -113,6 +120,11 @@
         if validator is None:
             validator = lambda tr: None
         self.validator = validator
+        # A callback to do something just after releasing transaction.
+        if releasefn is None:
+            releasefn = lambda tr, success: None
+        self.releasefn = releasefn
+
         # a dict of arguments to be passed to hooks
         self.hookargs = {}
         self.file = opener.open(self.journal, "w")
@@ -398,44 +410,48 @@
         # cleanup temporary files
         for l, f, b, c in self._backupentries:
             if l not in self._vfsmap and c:
-                self.report("couldn't remote %s: unknown cache location %s\n"
+                self.report("couldn't remove %s: unknown cache location %s\n"
                             % (b, l))
                 continue
             vfs = self._vfsmap[l]
             if not f and b and vfs.exists(b):
                 try:
                     vfs.unlink(b)
-                except (IOError, OSError, util.Abort) as inst:
+                except (IOError, OSError, error.Abort) as inst:
                     if not c:
                         raise
                     # Abort may be raised by read-only opener
-                    self.report("couldn't remote %s: %s\n"
+                    self.report("couldn't remove %s: %s\n"
                                 % (vfs.join(b), inst))
         self.entries = []
         self._writeundo()
         if self.after:
             self.after()
-        if self.opener.isfile(self.journal):
-            self.opener.unlink(self.journal)
         if self.opener.isfile(self._backupjournal):
             self.opener.unlink(self._backupjournal)
+        if self.opener.isfile(self.journal):
+            self.opener.unlink(self.journal)
+        if True:
             for l, _f, b, c in self._backupentries:
                 if l not in self._vfsmap and c:
-                    self.report("couldn't remote %s: unknown cache location"
+                    self.report("couldn't remove %s: unknown cache location"
                                 "%s\n" % (b, l))
                     continue
                 vfs = self._vfsmap[l]
                 if b and vfs.exists(b):
                     try:
                         vfs.unlink(b)
-                    except (IOError, OSError, util.Abort) as inst:
+                    except (IOError, OSError, error.Abort) as inst:
                         if not c:
                             raise
                         # Abort may be raised by read-only opener
-                        self.report("couldn't remote %s: %s\n"
+                        self.report("couldn't remove %s: %s\n"
                                     % (vfs.join(b), inst))
         self._backupentries = []
         self.journal = None
+
+        self.releasefn(self, True) # notify success of closing transaction
+
         # run post close action
         categories = sorted(self._postclosecallback)
         for cat in categories:
@@ -461,7 +477,7 @@
                 u = ''
             else:
                 if l not in self._vfsmap and c:
-                    self.report("couldn't remote %s: unknown cache location"
+                    self.report("couldn't remove %s: unknown cache location"
                                 "%s\n" % (b, l))
                     continue
                 vfs = self._vfsmap[l]
@@ -482,10 +498,10 @@
 
         try:
             if not self.entries and not self._backupentries:
+                if self._backupjournal:
+                    self.opener.unlink(self._backupjournal)
                 if self.journal:
                     self.opener.unlink(self.journal)
-                if self._backupjournal:
-                    self.opener.unlink(self._backupjournal)
                 return
 
             self.report(_("transaction abort!\n"))
@@ -500,7 +516,7 @@
                 self.report(_("rollback failed - please run hg recover\n"))
         finally:
             self.journal = None
-
+            self.releasefn(self, False) # notify failure of transaction
 
 def rollback(opener, vfsmap, file, report):
     """Rolls back the transaction contained in the given file
--- a/mercurial/treediscovery.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/treediscovery.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,10 +5,18 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+from __future__ import absolute_import
+
 import collections
-from node import nullid, short
-from i18n import _
-import util, error
+
+from .i18n import _
+from .node import (
+    nullid,
+    short,
+)
+from . import (
+    error,
+)
 
 def findcommonincoming(repo, remote, heads=None, force=False):
     """Return a tuple (common, fetch, heads) used to identify the common
@@ -140,7 +148,7 @@
         if force:
             repo.ui.warn(_("warning: repository is unrelated\n"))
         else:
-            raise util.Abort(_("repository is unrelated"))
+            raise error.Abort(_("repository is unrelated"))
 
     repo.ui.debug("found new changesets starting at " +
                  " ".join([short(f) for f in fetch]) + "\n")
--- a/mercurial/ui.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/ui.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,11 +5,28 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+from __future__ import absolute_import
+
+import errno
+import getpass
 import inspect
-from i18n import _
-import errno, getpass, os, socket, sys, tempfile, traceback
-import config, scmutil, util, error, formatter, progress
-from node import hex
+import os
+import socket
+import sys
+import tempfile
+import traceback
+
+from .i18n import _
+from .node import hex
+
+from . import (
+    config,
+    error,
+    formatter,
+    progress,
+    scmutil,
+    util,
+)
 
 samplehgrcs = {
     'user':
@@ -518,11 +535,12 @@
             except KeyError:
                 pass
         if not user:
-            raise util.Abort(_('no username supplied'),
+            raise error.Abort(_('no username supplied'),
                              hint=_('use "hg config --edit" '
                                     'to set your username'))
         if "\n" in user:
-            raise util.Abort(_("username %s contains a newline\n") % repr(user))
+            raise error.Abort(_("username %s contains a newline\n")
+                              % repr(user))
         return user
 
     def shortuser(self, user):
@@ -533,12 +551,21 @@
 
     def expandpath(self, loc, default=None):
         """Return repository location relative to cwd or from [paths]"""
-        if util.hasscheme(loc) or os.path.isdir(os.path.join(loc, '.hg')):
-            return loc
+        try:
+            p = self.paths.getpath(loc)
+            if p:
+                return p.rawloc
+        except error.RepoError:
+            pass
 
-        p = self.paths.getpath(loc, default=default)
-        if p:
-            return p.loc
+        if default:
+            try:
+                p = self.paths.getpath(default)
+                if p:
+                    return p.rawloc
+            except error.RepoError:
+                pass
+
         return loc
 
     @util.propertycache
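With the paths API in place, expandpath above resolves a location in three steps: a path object for loc itself, then the default fallback, then the raw string unchanged. A condensed sketch of that lookup order, with a plain dict standing in for ui.paths (the real getpath also accepts raw URIs and local repository paths, which this toy omits):

    def expandpath(paths, loc, default=None):
        """Condensed model of the new lookup order in ui.expandpath."""
        # `paths` maps names from the [paths] config section to locations.
        for candidate in (loc, default):
            if candidate and candidate in paths:
                return paths[candidate]
        # Nothing matched: hand the location back unchanged.
        return loc

    paths = {'default': 'https://example.org/repo',
             'upstream': 'ssh://hg@example.org/fork'}
    print(expandpath(paths, 'upstream'))                       # named path wins
    print(expandpath(paths, 'nosuchname', default='default'))  # falls back to default
    print(expandpath(paths, '/tmp/scratch-repo'))              # passed through as-is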
@@ -729,7 +756,7 @@
                 self.write(r, "\n")
             return r
         except EOFError:
-            raise util.Abort(_('response expected'))
+            raise error.Abort(_('response expected'))
 
     @staticmethod
     def extractchoices(prompt):
@@ -776,7 +803,7 @@
             else:
                 return getpass.getpass('')
         except EOFError:
-            raise util.Abort(_('response expected'))
+            raise error.Abort(_('response expected'))
     def status(self, *msg, **opts):
         '''write status message to output (if ui.quiet is False)
 
@@ -808,7 +835,10 @@
         if self.debugflag:
             opts['label'] = opts.get('label', '') + ' ui.debug'
             self.write(*msg, **opts)
-    def edit(self, text, user, extra={}, editform=None):
+
+    def edit(self, text, user, extra=None, editform=None, pending=None):
+        if extra is None:
+            extra = {}
         (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
                                       text=True)
         try:
@@ -825,12 +855,14 @@
                     break
             if editform:
                 environ.update({'HGEDITFORM': editform})
+            if pending:
+                environ.update({'HG_PENDING': pending})
 
             editor = self.geteditor()
 
             self.system("%s \"%s\"" % (editor, name),
                         environ=environ,
-                        onerr=util.Abort, errprefix=_("edit failed"))
+                        onerr=error.Abort, errprefix=_("edit failed"))
 
             f = open(name)
             t = f.read()
@@ -840,7 +872,7 @@
 
         return t
 
-    def system(self, cmd, environ={}, cwd=None, onerr=None, errprefix=None):
+    def system(self, cmd, environ=None, cwd=None, onerr=None, errprefix=None):
         '''execute shell command with appropriate output stream. command
         output will be redirected if fout is not stdout.
         '''
@@ -899,7 +931,7 @@
 
     def _progclear(self):
         """clear progress bar output if any. use it before any output"""
-        if '_progbar' not in vars(self): # nothing loadef yet
+        if '_progbar' not in vars(self): # nothing loaded yet
             return
         if self._progbar is not None and self._progbar.printed:
             self._progbar.clear()
@@ -943,9 +975,12 @@
 
         service should be a readily-identifiable subsystem, which will
         allow filtering.
-        message should be a newline-terminated string to log.
+
+        *msg should be a newline-terminated format string to log, and
+        then any values to %-format into that format string.
+
+        **opts currently has no defined meanings.
         '''
-        pass
 
     def label(self, msg, label):
         '''style msg based on supplied label
@@ -963,7 +998,7 @@
         """issue a developer warning message"""
         msg = 'devel-warn: ' + msg
         if self.tracebackflag:
-            util.debugstacktrace(msg, 2)
+            util.debugstacktrace(msg, 2, self.ferr, self.fout)
         else:
             curframe = inspect.currentframe()
             calframe = inspect.getouterframes(curframe, 2)
@@ -982,37 +1017,102 @@
             # No location is the same as not existing.
             if not loc:
                 continue
+
+            # TODO ignore default-push once all consumers stop referencing it
+            # since it is handled specifically below.
+
             self[name] = path(name, rawloc=loc)
 
+        # Handle default-push, which is a one-off that defines the push URL for
+        # the "default" path.
+        defaultpush = ui.config('paths', 'default-push')
+        if defaultpush and 'default' in self:
+            self['default']._pushloc = defaultpush
+
     def getpath(self, name, default=None):
-        """Return a ``path`` for the specified name, falling back to a default.
+        """Return a ``path`` from a string, falling back to a default.
+
+        ``name`` can be a named path or a location. Locations are filesystem
+        paths or URIs.
 
-        Returns the first of ``name`` or ``default`` that is present, or None
-        if neither is present.
+        Returns None if ``name`` is not a registered path, a URI, or a local
+        path to a repo.
         """
+        # Only fall back to default if no path was requested.
+        if name is None:
+            if default:
+                try:
+                    return self[default]
+                except KeyError:
+                    return None
+            else:
+                return None
+
+        # Most likely empty string.
+        # This may need to raise in the future.
+        if not name:
+            return None
+
         try:
             return self[name]
         except KeyError:
-            if default is not None:
-                try:
-                    return self[default]
-                except KeyError:
-                    pass
+            # Try to resolve as a local path or URI.
+            try:
+                return path(None, rawloc=name)
+            except ValueError:
+                raise error.RepoError(_('repository %s does not exist') %
+                                        name)
 
-        return None
+        assert False
 
 class path(object):
     """Represents an individual path and its configuration."""
 
-    def __init__(self, name, rawloc=None):
+    def __init__(self, name, rawloc=None, pushloc=None):
         """Construct a path from its config options.
 
         ``name`` is the symbolic name of the path.
         ``rawloc`` is the raw location, as defined in the config.
+        ``pushloc`` is the raw location pushes should be made to.
+
+        If ``name`` is not defined, we require that the location be a) a local
+        filesystem path with a .hg directory or b) a URL. If not,
+        ``ValueError`` is raised.
         """
+        if not rawloc:
+            raise ValueError('rawloc must be defined')
+
+        # Locations may define branches via syntax <base>#<branch>.
+        u = util.url(rawloc)
+        branch = None
+        if u.fragment:
+            branch = u.fragment
+            u.fragment = None
+
+        self.url = u
+        self.branch = branch
+
         self.name = name
-        # We'll do more intelligent things with rawloc in the future.
-        self.loc = rawloc
+        self.rawloc = rawloc
+        self.loc = str(u)
+        self._pushloc = pushloc
+
+        # When given a raw location but not a symbolic name, check that the
+        # location is valid.
+        if not name and not u.scheme and not self._isvalidlocalpath(self.loc):
+            raise ValueError('location is not a URL or path to a local '
+                             'repo: %s' % rawloc)
+
+    def _isvalidlocalpath(self, path):
+        """Returns True if the given path is a potentially valid repository.
+        This is its own function so that extensions can change the definition of
+        'valid' in this case (like when pulling from a git repo into a hg
+        one)."""
+        return os.path.isdir(os.path.join(path, '.hg'))
+
+    @property
+    def pushloc(self):
+        return self._pushloc or self.loc
 
 # we instantiate one globally shared progress bar to avoid
 # competing progress bars when multiple UI objects get created
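For reference, a rough sketch of how the ``path`` plumbing above resolves a configured path. The hostnames and paths are made up, and ``ui`` stands for an already-initialized ui instance:

    # Hypothetical hgrc:
    #   [paths]
    #   default = https://hg.example.org/repo#stable
    #   default-push = ssh://hg.example.org//srv/repo
    p = ui.paths.getpath('default')
    p.rawloc    # 'https://hg.example.org/repo#stable' (exactly as configured)
    p.loc       # 'https://hg.example.org/repo'        (#<branch> fragment stripped)
    p.branch    # 'stable'                             (taken from the fragment)
    p.pushloc   # 'ssh://hg.example.org//srv/repo'     (default-push wins for pushes)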
--- a/mercurial/unionrepo.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/unionrepo.py	Tue Oct 20 15:59:10 2015 -0500
@@ -11,11 +11,26 @@
 allowing operations like diff and log with revsets.
 """
 
-from node import nullid
-from i18n import _
+from __future__ import absolute_import
+
 import os
-import util, mdiff, cmdutil, scmutil
-import localrepo, changelog, manifest, filelog, revlog, pathutil
+
+from .i18n import _
+from .node import nullid
+
+from . import (
+    changelog,
+    cmdutil,
+    error,
+    filelog,
+    localrepo,
+    manifest,
+    mdiff,
+    pathutil,
+    revlog,
+    scmutil,
+    util,
+)
 
 class unionrevlog(revlog.revlog):
     def __init__(self, opener, indexfile, revlog2, linkmapper):
@@ -35,7 +50,7 @@
         for rev2 in self.revlog2:
             rev = self.revlog2.index[rev2]
             # rev numbers - in revlog2, very different from self.rev
-            _start, _csize, _rsize, _base, linkrev, p1rev, p2rev, node = rev
+            _start, _csize, _rsize, base, linkrev, p1rev, p2rev, node = rev
 
             if linkmapper is None: # link is to same revlog
                 assert linkrev == rev2 # we never link back
@@ -43,6 +58,9 @@
             else: # rev must be mapped from repo2 cl to unified cl by linkmapper
                 link = linkmapper(linkrev)
 
+            if linkmapper is not None: # link is to same revlog
+                base = linkmapper(base)
+
             if node in self.nodemap:
                 # this happens for the common revlog revisions
                 self.bundlerevs.add(self.nodemap[node])
@@ -51,7 +69,7 @@
             p1node = self.revlog2.node(p1rev)
             p2node = self.revlog2.node(p2rev)
 
-            e = (None, None, None, None,
+            e = (None, None, None, base,
                  link, self.rev(p1node), self.rev(p2node), node)
             self.index.insert(-1, e)
             self.nodemap[node] = n
@@ -214,7 +232,7 @@
 
 def instance(ui, path, create):
     if create:
-        raise util.Abort(_('cannot create new union repository'))
+        raise error.Abort(_('cannot create new union repository'))
     parentpath = ui.config("bundle", "mainreporoot", "")
     if not parentpath:
         # try to find the correct path to the working directory repo
--- a/mercurial/url.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/url.py	Tue Oct 20 15:59:10 2015 -0500
@@ -7,10 +7,24 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import urllib, urllib2, httplib, os, socket, cStringIO, base64
-from i18n import _
-import keepalive, util, sslutil
-import httpconnection as httpconnectionmod
+from __future__ import absolute_import
+
+import base64
+import cStringIO
+import httplib
+import os
+import socket
+import urllib
+import urllib2
+
+from .i18n import _
+from . import (
+    error,
+    httpconnection as httpconnectionmod,
+    keepalive,
+    sslutil,
+    util,
+)
 
 class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
     def __init__(self, ui):
@@ -35,7 +49,7 @@
             u = util.url(authuri)
             u.query = None
             if not self.ui.interactive():
-                raise util.Abort(_('http authorization required for %s') %
+                raise error.Abort(_('http authorization required for %s') %
                                  util.hidepassword(str(u)))
 
             self.ui.write(_("http authorization required for %s\n") %
@@ -405,17 +419,8 @@
         if req is not self.retried_req:
             self.retried_req = req
             self.retried = 0
-        # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
-        # it doesn't know about the auth type requested. This can happen if
-        # somebody is using BasicAuth and types a bad password.
-        try:
-            return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
-                        self, auth_header, host, req, headers)
-        except ValueError as inst:
-            arg = inst.args[0]
-            if arg.startswith("AbstractDigestAuthHandler doesn't know "):
-                return
-            raise
+        return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
+                    self, auth_header, host, req, headers)
 
 class httpbasicauthhandler(urllib2.HTTPBasicAuthHandler):
     def __init__(self, *args, **kwargs):
--- a/mercurial/util.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/util.py	Tue Oct 20 15:59:10 2015 -0500
@@ -19,8 +19,11 @@
 import errno, shutil, sys, tempfile, traceback
 import re as remod
 import os, time, datetime, calendar, textwrap, signal, collections
+import stat
 import imp, socket, urllib
 import gc
+import bz2
+import zlib
 
 if os.name == 'nt':
     import windows as platform
@@ -728,7 +731,11 @@
     global _hgexecutable
     _hgexecutable = path
 
-def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
+def _isstdout(f):
+    fileno = getattr(f, 'fileno', None)
+    return fileno and fileno() == sys.__stdout__.fileno()
+
+def system(cmd, environ=None, cwd=None, onerr=None, errprefix=None, out=None):
     '''enhanced shell command execution.
     run with environment maybe modified, maybe in different dir.
 
@@ -737,6 +744,8 @@
 
     if out is specified, it is assumed to be a file-like object that has a
     write() method. stdout and stderr will be redirected to out.'''
+    if environ is None:
+        environ = {}
     try:
         sys.stdout.flush()
     except Exception:
@@ -761,7 +770,7 @@
         env = dict(os.environ)
         env.update((k, py2shell(v)) for k, v in environ.iteritems())
         env['HG'] = hgexecutable()
-        if out is None or out == sys.__stdout__:
+        if out is None or _isstdout(out):
             rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                                  env=env, cwd=cwd)
         else:
@@ -944,6 +953,20 @@
     except AttributeError:
         return os.stat(fp.name)
 
+def statmtimesec(st):
+    """Get mtime as integer of seconds
+
+    'int(st.st_mtime)' cannot be used because st.st_mtime is computed as
+    'sec + 1e-9 * nsec' and double-precision floating-point type is too narrow
+    to represent nanoseconds. If 'nsec' is close to 1 sec, 'int(st.st_mtime)'
+    can be 'sec + 1'. (issue4836)
+    """
+    try:
+        return st[stat.ST_MTIME]
+    except (TypeError, IndexError):
+        # osutil.stat doesn't allow index access and its st_mtime is int
+        return st.st_mtime
+
 # File system features
 
 def checkcase(path):
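The rounding problem that statmtimesec() works around can be reproduced in a couple of lines. The timestamp below is arbitrary; any recent epoch value with nsec close to one second behaves the same way with a 64-bit float:

    >>> sec, nsec = 1445000000, 999999999
    >>> st_mtime = sec + 1e-9 * nsec   # how the float st_mtime field is derived
    >>> int(st_mtime)                  # the double rounds up to the next second
    1445000001
    >>> # indexing the stat result with stat.ST_MTIME sidesteps the float entirely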
@@ -1278,16 +1301,20 @@
                     yield chunk
         self.iter = splitbig(in_iter)
         self._queue = collections.deque()
+        self._chunkoffset = 0
 
     def read(self, l=None):
         """Read L bytes of data from the iterator of chunks of data.
         Returns less than L bytes if the iterator runs dry.
 
         If size parameter is omitted, read everything"""
+        if l is None:
+            return ''.join(self.iter)
+
         left = l
         buf = []
         queue = self._queue
-        while left is None or left > 0:
+        while left > 0:
             # refill the queue
             if not queue:
                 target = 2**18
@@ -1299,14 +1326,40 @@
                 if not queue:
                     break
 
-            chunk = queue.popleft()
-            if left is not None:
-                left -= len(chunk)
-            if left is not None and left < 0:
-                queue.appendleft(chunk[left:])
-                buf.append(chunk[:left])
+            # The easy way to do this would be to queue.popleft(), modify the
+            # chunk (if necessary), then queue.appendleft(). However, for cases
+            # where we read partial chunk content, this incurs 2 dequeue
+            # mutations and creates a new str for the remaining chunk in the
+            # queue. Our code below avoids this overhead.
+
+            chunk = queue[0]
+            chunkl = len(chunk)
+            offset = self._chunkoffset
+
+            # Use full chunk.
+            if offset == 0 and left >= chunkl:
+                left -= chunkl
+                queue.popleft()
+                buf.append(chunk)
+                # self._chunkoffset remains at 0.
+                continue
+
+            chunkremaining = chunkl - offset
+
+            # Use all of unconsumed part of chunk.
+            if left >= chunkremaining:
+                left -= chunkremaining
+                queue.popleft()
+                # The offset == 0 case is handled by the block above, so this
+                # won't merely copy the whole chunk via ``chunk[0:]``.
+                buf.append(chunk[offset:])
+                self._chunkoffset = 0
+
+            # Partial chunk needed.
             else:
-                buf.append(chunk)
+                buf.append(chunk[offset:offset + left])
+                self._chunkoffset += left
+                left -= chunkremaining
 
         return ''.join(buf)
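A small usage sketch of the sized-read behaviour implemented above, assuming the module is importable as mercurial.util:

    from mercurial import util

    buf = util.chunkbuffer(iter(['abcd', 'efgh']))
    buf.read(6)   # 'abcdef' - one whole chunk plus part of the next
    buf.read(6)   # 'gh'     - the iterator runs dry, so fewer bytes come back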
 
@@ -1371,22 +1424,22 @@
     """turn (timestamp, tzoff) tuple into iso 8631 date."""
     return datestr(date, format='%Y-%m-%d')
 
+def parsetimezone(tz):
+    """parse a timezone string and return an offset integer"""
+    if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
+        sign = (tz[0] == "+") and 1 or -1
+        hours = int(tz[1:3])
+        minutes = int(tz[3:5])
+        return -sign * (hours * 60 + minutes) * 60
+    if tz == "GMT" or tz == "UTC":
+        return 0
+    return None
+
 def strdate(string, format, defaults=[]):
     """parse a localized time string and return a (unixtime, offset) tuple.
     if the string cannot be parsed, ValueError is raised."""
-    def timezone(string):
-        tz = string.split()[-1]
-        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
-            sign = (tz[0] == "+") and 1 or -1
-            hours = int(tz[1:3])
-            minutes = int(tz[3:5])
-            return -sign * (hours * 60 + minutes) * 60
-        if tz == "GMT" or tz == "UTC":
-            return 0
-        return None
-
     # NOTE: unixtime = localunixtime + offset
-    offset, date = timezone(string), string
+    offset, date = parsetimezone(string.split()[-1]), string
     if offset is not None:
         date = " ".join(string.split()[:-1])
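A few example values for the newly extracted parsetimezone(). Note the convention stated just above (unixtime = localunixtime + offset), so zones east of UTC yield negative offsets:

    >>> parsetimezone('+0530')
    -19800
    >>> parsetimezone('-0800')
    28800
    >>> parsetimezone('GMT')
    0
    >>> parsetimezone('EST') is None
    True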
 
@@ -1412,7 +1465,7 @@
         unixtime = localunixtime + offset
     return unixtime, offset
 
-def parsedate(date, formats=None, bias={}):
+def parsedate(date, formats=None, bias=None):
     """parse a localized date/time and return a (unixtime, offset) tuple.
 
     The date may be a "unixtime offset" string or in one of the specified
@@ -1432,6 +1485,8 @@
     >>> tz == strtz
     True
     """
+    if bias is None:
+        bias = {}
     if not date:
         return 0, 0
     if isinstance(date, tuple) and len(date) == 2:
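The bias={} to bias=None change here (and the matching environ/extra changes elsewhere in this series) avoids Python's shared-mutable-default pitfall; a minimal illustration:

    def remember(item, seen=[]):   # anti-pattern: the default list is created once
        seen.append(item)
        return seen

    remember('a')   # ['a']
    remember('b')   # ['a', 'b'] - state leaks between unrelated calls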
@@ -1565,6 +1620,45 @@
         start, stop = lower(date), upper(date)
         return lambda x: x >= start and x <= stop
 
+def stringmatcher(pattern):
+    """
+    accepts a string, possibly starting with 're:' or 'literal:' prefix.
+    returns the matcher name, pattern, and matcher function.
+    missing or unknown prefixes are treated as literal matches.
+
+    helper for tests:
+    >>> def test(pattern, *tests):
+    ...     kind, pattern, matcher = stringmatcher(pattern)
+    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])
+
+    exact matching (no prefix):
+    >>> test('abcdefg', 'abc', 'def', 'abcdefg')
+    ('literal', 'abcdefg', [False, False, True])
+
+    regex matching ('re:' prefix)
+    >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
+    ('re', 'a.+b', [False, False, True])
+
+    force exact matches ('literal:' prefix)
+    >>> test('literal:re:foobar', 'foobar', 're:foobar')
+    ('literal', 're:foobar', [False, True])
+
+    unknown prefixes are ignored and treated as literals
+    >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
+    ('literal', 'foo:bar', [False, False, True])
+    """
+    if pattern.startswith('re:'):
+        pattern = pattern[3:]
+        try:
+            regex = remod.compile(pattern)
+        except remod.error as e:
+            raise error.ParseError(_('invalid regular expression: %s')
+                                   % e)
+        return 're', pattern, regex.search
+    elif pattern.startswith('literal:'):
+        pattern = pattern[8:]
+    return 'literal', pattern, pattern.__eq__
+
 def shortuser(user):
     """Return a short representation of a user name or email address."""
     f = user.find('@')
@@ -1667,7 +1761,7 @@
             elif not cur_line:
                 cur_line.append(reversed_chunks.pop())
 
-        # this overriding code is imported from TextWrapper of python 2.6
+        # this overriding code is imported from TextWrapper of Python 2.6
         # to calculate columns of string by 'encoding.ucolwidth()'
         def _wrap_chunks(self, chunks):
             colwidth = encoding.ucolwidth
@@ -1831,7 +1925,7 @@
 
     If port is an integer, it's returned as is. If it's a string, it's
     looked up using socket.getservbyname(). If there's no matching
-    service, util.Abort is raised.
+    service, error.Abort is raised.
     """
     try:
         return int(port)
@@ -2260,7 +2354,7 @@
 
 class hooks(object):
     '''A collection of hook functions that can be used to extend a
-    function's behaviour. Hooks are called in lexicographic order,
+    function's behavior. Hooks are called in lexicographic order,
     based on the names of their sources.'''
 
     def __init__(self):
@@ -2338,5 +2432,46 @@
         yield path[:pos]
         pos = path.rfind('/', 0, pos)
 
+# compression utility
+
+class nocompress(object):
+    def compress(self, x):
+        return x
+    def flush(self):
+        return ""
+
+compressors = {
+    None: nocompress,
+    # lambda to prevent early import
+    'BZ': lambda: bz2.BZ2Compressor(),
+    'GZ': lambda: zlib.compressobj(),
+    }
+# also support the old form as a courtesy
+compressors['UN'] = compressors[None]
+
+def _makedecompressor(decompcls):
+    def generator(f):
+        d = decompcls()
+        for chunk in filechunkiter(f):
+            yield d.decompress(chunk)
+    def func(fh):
+        return chunkbuffer(generator(fh))
+    return func
+
+def _bz2():
+    d = bz2.BZ2Decompressor()
+    # Bzip2 streams start with BZ, but we stripped it.
+    # We put it back for good measure.
+    d.decompress('BZ')
+    return d
+
+decompressors = {None: lambda fh: fh,
+                 '_truncatedBZ': _makedecompressor(_bz2),
+                 'BZ': _makedecompressor(lambda: bz2.BZ2Decompressor()),
+                 'GZ': _makedecompressor(lambda: zlib.decompressobj()),
+                 }
+# also support the old form as a courtesy
+decompressors['UN'] = decompressors[None]
+
 # convenient shortcut
 dst = debugstacktrace
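A minimal round trip through the compression tables added above, assuming the module is importable as mercurial.util; 'GZ' and None/'UN' behave analogously:

    import cStringIO
    from mercurial import util

    comp = util.compressors['BZ']()
    blob = comp.compress('some payload') + comp.flush()
    buf = util.decompressors['BZ'](cStringIO.StringIO(blob))  # a chunkbuffer
    assert buf.read() == 'some payload'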
--- a/mercurial/verify.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/verify.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,10 +5,21 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from node import nullid, short
-from i18n import _
+from __future__ import absolute_import
+
 import os
-import revlog, util, error
+
+from .i18n import _
+from .node import (
+    nullid,
+    short,
+)
+
+from . import (
+    error,
+    revlog,
+    util,
+)
 
 def verify(repo):
     lock = repo.lock()
@@ -39,7 +50,7 @@
     lrugetctx = util.lrucachefunc(repo.changectx)
 
     if not repo.url().startswith('file:'):
-        raise util.Abort(_("cannot verify bundle or remote repos"))
+        raise error.Abort(_("cannot verify bundle or remote repos"))
 
     def err(linkrev, msg, filename=None):
         if linkrev is not None:
--- a/mercurial/win32.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/win32.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,7 +5,14 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import ctypes, errno, msvcrt, os, subprocess, random
+from __future__ import absolute_import
+
+import ctypes
+import errno
+import msvcrt
+import os
+import random
+import subprocess
 
 _kernel32 = ctypes.windll.kernel32
 _advapi32 = ctypes.windll.advapi32
--- a/mercurial/windows.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/windows.py	Tue Oct 20 15:59:10 2015 -0500
@@ -27,6 +27,74 @@
 
 umask = 0o022
 
+class mixedfilemodewrapper(object):
+    """Wraps a file handle when it is opened in read/write mode.
+
+    fopen() and fdopen() on Windows have a specific-to-Windows requirement
+    that files opened with mode r+, w+, or a+ make a call to a file positioning
+    function when switching between reads and writes. Without this extra call,
+    Python will raise a not very intuitive "IOError: [Errno 0] Error."
+
+    This class wraps posixfile instances when the file is opened in read/write
+    mode and automatically adds checks or inserts appropriate file positioning
+    calls when necessary.
+    """
+    OPNONE = 0
+    OPREAD = 1
+    OPWRITE = 2
+
+    def __init__(self, fp):
+        object.__setattr__(self, '_fp', fp)
+        object.__setattr__(self, '_lastop', 0)
+
+    def __getattr__(self, name):
+        return getattr(self._fp, name)
+
+    def __setattr__(self, name, value):
+        return self._fp.__setattr__(name, value)
+
+    def _noopseek(self):
+        self._fp.seek(0, os.SEEK_CUR)
+
+    def seek(self, *args, **kwargs):
+        object.__setattr__(self, '_lastop', self.OPNONE)
+        return self._fp.seek(*args, **kwargs)
+
+    def write(self, d):
+        if self._lastop == self.OPREAD:
+            self._noopseek()
+
+        object.__setattr__(self, '_lastop', self.OPWRITE)
+        return self._fp.write(d)
+
+    def writelines(self, *args, **kwargs):
+        if self._lastop == self.OPREAD:
+            self._noopseek()
+
+        object.__setattr__(self, '_lastop', self.OPWRITE)
+        return self._fp.writelines(*args, **kwargs)
+
+    def read(self, *args, **kwargs):
+        if self._lastop == self.OPWRITE:
+            self._noopseek()
+
+        object.__setattr__(self, '_lastop', self.OPREAD)
+        return self._fp.read(*args, **kwargs)
+
+    def readline(self, *args, **kwargs):
+        if self._lastop == self.OPWRITE:
+            self._noopseek()
+
+        object.__setattr__(self, '_lastop', self.OPREAD)
+        return self._fp.readline(*args, **kwargs)
+
+    def readlines(self, *args, **kwargs):
+        if self._lastop == self.OPWRITE:
+            self._noopseek()
+
+        object.__setattr__(self, '_lastop', self.OPREAD)
+        return self._fp.readlines(*args, **kwargs)
+
 def posixfile(name, mode='r', buffering=-1):
     '''Open a file with even more POSIX-like semantics'''
     try:
@@ -37,6 +105,9 @@
         if 'a' in mode:
             fp.seek(0, os.SEEK_END)
 
+        if '+' in mode:
+            return mixedfilemodewrapper(fp)
+
         return fp
     except WindowsError as err:
         # convert to a friendlier exception
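An illustration of what the wrapper above buys (the file name is hypothetical): on Windows, switching straight from a read to a write on an r+ handle would otherwise fail with "IOError: [Errno 0] Error", but posixfile now returns the wrapper for '+' modes and inserts the positioning call itself:

    fp = posixfile('scratch.bin', 'r+b')
    header = fp.read(4)
    fp.write('tail')   # the wrapper issues the no-op seek before this write
    fp.close()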
--- a/mercurial/wireproto.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/wireproto.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,12 +5,30 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import urllib, tempfile, os, sys
-from i18n import _
-from node import bin, hex
-import changegroup as changegroupmod, bundle2, pushkey as pushkeymod
-import peer, error, encoding, util, exchange
+from __future__ import absolute_import
+
+import os
+import sys
+import tempfile
+import urllib
 
+from .i18n import _
+from .node import (
+    bin,
+    hex,
+)
+
+from . import (
+    bundle2,
+    changegroup as changegroupmod,
+    encoding,
+    error,
+    exchange,
+    peer,
+    pushkey as pushkeymod,
+    streamclone,
+    util,
+)
 
 class abstractserverproto(object):
     """abstract class that summarizes the protocol API
@@ -58,48 +76,12 @@
         Some protocols may have compressed the contents."""
         raise NotImplementedError()
 
-# abstract batching support
-
-class future(object):
-    '''placeholder for a value to be set later'''
-    def set(self, value):
-        if util.safehasattr(self, 'value'):
-            raise error.RepoError("future is already set")
-        self.value = value
-
-class batcher(object):
-    '''base class for batches of commands submittable in a single request
-
-    All methods invoked on instances of this class are simply queued and
-    return a a future for the result. Once you call submit(), all the queued
-    calls are performed and the results set in their respective futures.
-    '''
-    def __init__(self):
-        self.calls = []
-    def __getattr__(self, name):
-        def call(*args, **opts):
-            resref = future()
-            self.calls.append((name, args, opts, resref,))
-            return resref
-        return call
-    def submit(self):
-        pass
-
-class localbatch(batcher):
-    '''performs the queued calls directly'''
-    def __init__(self, local):
-        batcher.__init__(self)
-        self.local = local
-    def submit(self):
-        for name, args, opts, resref in self.calls:
-            resref.set(getattr(self.local, name)(*args, **opts))
-
-class remotebatch(batcher):
+class remotebatch(peer.batcher):
     '''batches the queued calls; uses as few roundtrips as possible'''
     def __init__(self, remote):
         '''remote must support _submitbatch(encbatch) and
         _submitone(op, encargs)'''
-        batcher.__init__(self)
+        peer.batcher.__init__(self)
         self.remote = remote
     def submit(self):
         req, rsp = [], []
@@ -128,41 +110,10 @@
             encresref.set(encres)
             resref.set(batchable.next())
 
-def batchable(f):
-    '''annotation for batchable methods
-
-    Such methods must implement a coroutine as follows:
-
-    @batchable
-    def sample(self, one, two=None):
-        # Handle locally computable results first:
-        if not one:
-            yield "a local result", None
-        # Build list of encoded arguments suitable for your wire protocol:
-        encargs = [('one', encode(one),), ('two', encode(two),)]
-        # Create future for injection of encoded result:
-        encresref = future()
-        # Return encoded arguments and future:
-        yield encargs, encresref
-        # Assuming the future to be filled with the result from the batched
-        # request now. Decode it:
-        yield decode(encresref.value)
-
-    The decorator returns a function which wraps this coroutine as a plain
-    method, but adds the original method as an attribute called "batchable",
-    which is used by remotebatch to split the call into separate encoding and
-    decoding phases.
-    '''
-    def plain(*args, **opts):
-        batchable = f(*args, **opts)
-        encargsorres, encresref = batchable.next()
-        if not encresref:
-            return encargsorres # a local result in this case
-        self = args[0]
-        encresref.set(self._submitone(f.func_name, encargsorres))
-        return batchable.next()
-    setattr(plain, 'batchable', f)
-    return plain
+# Forward a couple of names from peer to make wireproto interactions
+# slightly more sensible.
+batchable = peer.batchable
+future = peer.future
 
 # list of nodes encoding / decoding
 
@@ -209,14 +160,18 @@
              'obsmarkers': 'boolean',
              'bundlecaps': 'scsv',
              'listkeys': 'csv',
-             'cg': 'boolean'}
+             'cg': 'boolean',
+             'cbattempted': 'boolean'}
 
 # client side
 
 class wirepeer(peer.peerrepository):
 
     def batch(self):
-        return remotebatch(self)
+        if self.capable('batch'):
+            return remotebatch(self)
+        else:
+            return peer.localbatch(self)
     def _submitbatch(self, req):
         cmds = []
         for op, argsdict in req:
@@ -610,7 +565,7 @@
     """
     # copy to prevent modification of the global list
     caps = list(wireprotocaps)
-    if _allowstream(repo.ui):
+    if streamclone.allowservergeneration(repo.ui):
         if repo.ui.configbool('server', 'preferuncompressed', False):
             caps.append('stream-preferred')
         requiredformats = repo.requirements & repo.supportedformats
@@ -671,7 +626,12 @@
         elif keytype == 'scsv':
             opts[k] = set(v.split(','))
         elif keytype == 'boolean':
-            opts[k] = bool(v)
+            # Client should serialize False as '0', which is a non-empty string
+            # so it evaluates as a True bool.
+            if v == '0':
+                opts[k] = False
+            else:
+                opts[k] = bool(v)
         elif keytype != 'plain':
             raise KeyError('unknown getbundle option type %s'
                            % keytype)
@@ -736,7 +696,7 @@
         try:
             r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
                              encoding.tolocal(old), new) or False
-        except util.Abort:
+        except error.Abort:
             r = False
 
         output = proto.restore()
@@ -747,16 +707,13 @@
                      encoding.tolocal(old), new)
     return '%s\n' % int(r)
 
-def _allowstream(ui):
-    return ui.configbool('server', 'uncompressed', True, untrusted=True)
-
 @wireprotocommand('stream_out')
 def stream(repo, proto):
     '''If the server supports streaming clone, it advertises the "stream"
     capability with a value representing the version and flags of the repo
     it is serving. Client checks to see if it understands the format.
     '''
-    if not _allowstream(repo.ui):
+    if not streamclone.allowservergeneration(repo.ui):
         return '1\n'
 
     def getstream(it):
@@ -767,7 +724,7 @@
     try:
         # LockError may be raised before the first result is yielded. Don't
         # emit output until we're sure we got the lock successfully.
-        it = exchange.generatestreamclone(repo)
+        it = streamclone.generatev1wireproto(repo)
         return streamres(getstream(it))
     except error.LockError:
         return '2\n'
@@ -801,12 +758,12 @@
             fp.close()
             os.unlink(tempname)
 
-    except (error.BundleValueError, util.Abort, error.PushRaced) as exc:
+    except (error.BundleValueError, error.Abort, error.PushRaced) as exc:
         # handle non-bundle2 case first
         if not getattr(exc, 'duringunbundle2', False):
             try:
                 raise
-            except util.Abort:
+            except error.Abort:
                 # The old code we moved used sys.stderr directly.
                 # We did not change it to minimise code change.
                 # This need to be moved to something proper.
@@ -847,7 +804,7 @@
                 errpart.addparam('parttype', exc.parttype)
             if exc.params:
                 errpart.addparam('params', '\0'.join(exc.params))
-        except util.Abort as exc:
+        except error.Abort as exc:
             manargs = [('message', str(exc))]
             advargs = []
             if exc.hint is not None:
--- a/mercurial/worker.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/mercurial/worker.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,9 +5,16 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from i18n import _
-import errno, os, signal, sys, threading
-import util
+from __future__ import absolute_import
+
+import errno
+import os
+import signal
+import sys
+import threading
+
+from .i18n import _
+from . import error
 
 def countcpus():
     '''try to count the number of CPUs on the system'''
@@ -38,7 +45,7 @@
             if n >= 1:
                 return n
         except ValueError:
-            raise util.Abort(_('number of cpus must be an integer'))
+            raise error.Abort(_('number of cpus must be an integer'))
     return min(max(countcpus(), 4), 32)
 
 if os.name == 'posix':
--- a/setup.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/setup.py	Tue Oct 20 15:59:10 2015 -0500
@@ -70,7 +70,11 @@
 import shutil
 import tempfile
 from distutils import log
-from distutils.core import setup, Command, Extension
+if 'FORCE_SETUPTOOLS' in os.environ:
+    from setuptools import setup
+else:
+    from distutils.core import setup
+from distutils.core import Command, Extension
 from distutils.dist import Distribution
 from distutils.command.build import build
 from distutils.command.build_ext import build_ext
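With the conditional import above, building a wheel only requires forcing setuptools on (the bdist_wheel command is provided by the wheel package on top of setuptools, not by distutils), for example:

    $ FORCE_SETUPTOOLS=1 python setup.py bdist_wheel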
@@ -602,8 +606,8 @@
       version=setupversion,
       author='Matt Mackall and many others',
       author_email='mercurial@selenic.com',
-      url='http://mercurial.selenic.com/',
-      download_url='http://mercurial.selenic.com/release/',
+      url='https://mercurial-scm.org/',
+      download_url='https://mercurial-scm.org/release/',
       description=('Fast scalable distributed SCM (revision control, version '
                    'control) system'),
       long_description=('Mercurial is a distributed SCM tool written in Python.'
--- a/tests/README	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/README	Tue Oct 20 15:59:10 2015 -0500
@@ -3,5 +3,5 @@
 cd tests/
 python run-tests.py
 
-See http://mercurial.selenic.com/wiki/WritingTests for
+See https://mercurial-scm.org/wiki/WritingTests for
 more information on writing tests.
--- a/tests/autodiff.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/autodiff.py	Tue Oct 20 15:59:10 2015 -0500
@@ -1,7 +1,7 @@
 # Extension dedicated to test patch.diff() upgrade modes
 #
 #
-from mercurial import cmdutil, scmutil, patch, util
+from mercurial import cmdutil, scmutil, patch, error
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
@@ -30,9 +30,9 @@
         diffopts.git = False
         diffopts.upgrade = True
         def losedatafn(fn=None, **kwargs):
-            raise util.Abort('losing data for %s' % fn)
+            raise error.Abort('losing data for %s' % fn)
     else:
-        raise util.Abort('--git must be yes, no or auto')
+        raise error.Abort('--git must be yes, no or auto')
 
     node1, node2 = scmutil.revpair(repo, [])
     m = scmutil.match(repo[node2], pats, opts)
--- a/tests/bzr-definitions	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/bzr-definitions	Tue Oct 20 15:59:10 2015 -0500
@@ -1,7 +1,5 @@
 # this file holds the definitions that are used in various bzr tests
 
-"$TESTDIR/hghave" bzr || exit 80
-
 TERM=dumb; export TERM
 echo '[extensions]' >> $HGRCPATH
 echo 'convert = ' >> $HGRCPATH
--- a/tests/fakedirstatewritetime.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/fakedirstatewritetime.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,7 +5,7 @@
 #   - 'workingctx._checklookup()' (= 'repo.status()')
 #   - 'committablectx.markcommitted()'
 
-from mercurial import context, extensions, parsers, util
+from mercurial import context, dirstate, extensions, parsers, util
 
 def pack_dirstate(fakenow, orig, dmap, copymap, pl, now):
     # execute what original parsers.pack_dirstate should do actually
@@ -31,17 +31,19 @@
 
     # parsing 'fakenow' in YYYYmmddHHMM format makes comparison between
     # 'fakenow' value and 'touch -t YYYYmmddHHMM' argument easy
-    timestamp = util.parsedate(fakenow, ['%Y%m%d%H%M'])[0]
-    fakenow = float(timestamp)
+    fakenow = util.parsedate(fakenow, ['%Y%m%d%H%M'])[0]
 
     orig_pack_dirstate = parsers.pack_dirstate
+    orig_dirstate_getfsnow = dirstate._getfsnow
     wrapper = lambda *args: pack_dirstate(fakenow, orig_pack_dirstate, *args)
 
     parsers.pack_dirstate = wrapper
+    dirstate._getfsnow = lambda *args: fakenow
     try:
         return func()
     finally:
         parsers.pack_dirstate = orig_pack_dirstate
+        dirstate._getfsnow = orig_dirstate_getfsnow
 
 def _checklookup(orig, workingctx, files):
     ui = workingctx.repo().ui
--- a/tests/filterpyflakes.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/filterpyflakes.py	Tue Oct 20 15:59:10 2015 -0500
@@ -2,7 +2,7 @@
 
 # Filter output by pyflakes to control which warnings we check
 
-import sys, re, os
+import sys, re
 
 def makekey(typeandline):
     """
@@ -42,7 +42,7 @@
     else:
         continue # no pattern matched, next line
     fn = line.split(':', 1)[0]
-    f = open(os.path.join(os.path.dirname(os.path.dirname(__file__)), fn))
+    f = open(fn)
     data = f.read()
     f.close()
     if 'no-' 'check-code' in data:
--- a/tests/hghave	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/hghave	Tue Oct 20 15:59:10 2015 -0500
@@ -30,10 +30,8 @@
                   help="test available features")
 parser.add_option("--list-features", action="store_true",
                   help="list available features")
-parser.add_option("-q", "--quiet", action="store_true",
-                  help="check features silently")
 
-def _loadaddon(quiet):
+def _loadaddon():
     if 'TESTDIR' in os.environ:
         # loading from '.' isn't needed, because `hghave` should be
         # running at TESTTMP in this case
@@ -48,15 +46,14 @@
     try:
         import hghaveaddon
     except BaseException, inst:
-        if not quiet:
-            sys.stderr.write('failed to import hghaveaddon.py from %r: %s\n'
-                             % (path, inst))
+        sys.stderr.write('failed to import hghaveaddon.py from %r: %s\n'
+                         % (path, inst))
         sys.exit(2)
     sys.path.pop(0)
 
 if __name__ == '__main__':
     options, args = parser.parse_args()
-    _loadaddon(options.quiet)
+    _loadaddon()
     if options.list_features:
         list_features()
         sys.exit(0)
@@ -64,36 +61,4 @@
     if options.test_features:
         sys.exit(test_features())
 
-    quiet = options.quiet
-
-    failures = 0
-
-    def error(msg):
-        global failures
-        if not quiet:
-            sys.stderr.write(msg + '\n')
-        failures += 1
-
-    for feature in args:
-        negate = feature.startswith('no-')
-        if negate:
-            feature = feature[3:]
-
-        if feature not in checks:
-            error('skipped: unknown feature: ' + feature)
-            sys.exit(2)
-
-        check, desc = checks[feature]
-        try:
-            available = check()
-        except Exception, e:
-            error('hghave check failed: ' + feature)
-            continue
-
-        if not negate and not available:
-            error('skipped: missing feature: ' + desc)
-        elif negate and available:
-            error('skipped: system supports %s' % desc)
-
-    if failures != 0:
-        sys.exit(1)
+    hghave.require(args)
--- a/tests/hghave.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/hghave.py	Tue Oct 20 15:59:10 2015 -0500
@@ -1,6 +1,9 @@
-import os, stat
+import errno
+import os
 import re
 import socket
+import stat
+import subprocess
 import sys
 import tempfile
 
@@ -17,19 +20,68 @@
         return func
     return decorator
 
+def checkfeatures(features):
+    result = {
+        'error': [],
+        'missing': [],
+        'skipped': [],
+    }
+
+    for feature in features:
+        negate = feature.startswith('no-')
+        if negate:
+            feature = feature[3:]
+
+        if feature not in checks:
+            result['missing'].append(feature)
+            continue
+
+        check, desc = checks[feature]
+        try:
+            available = check()
+        except Exception:
+            result['error'].append('hghave check failed: %s' % feature)
+            continue
+
+        if not negate and not available:
+            result['skipped'].append('missing feature: %s' % desc)
+        elif negate and available:
+            result['skipped'].append('system supports %s' % desc)
+
+    return result
+
+def require(features):
+    """Require that features are available, exiting if not."""
+    result = checkfeatures(features)
+
+    for missing in result['missing']:
+        sys.stderr.write('skipped: unknown feature: %s\n' % missing)
+    for msg in result['skipped']:
+        sys.stderr.write('skipped: %s\n' % msg)
+    for msg in result['error']:
+        sys.stderr.write('%s\n' % msg)
+
+    if result['missing']:
+        sys.exit(2)
+
+    if result['skipped'] or result['error']:
+        sys.exit(1)
+
 def matchoutput(cmd, regexp, ignorestatus=False):
     """Return True if cmd executes successfully and its output
     is matched by the supplied regular expression.
     """
     r = re.compile(regexp)
-    fh = os.popen(cmd)
-    s = fh.read()
     try:
-        ret = fh.close()
-    except IOError:
-        # Happen in Windows test environment
-        ret = 1
-    return (ignorestatus or ret is None) and r.search(s)
+        p = subprocess.Popen(
+            cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    except OSError as e:
+        if e.errno != errno.ENOENT:
+            raise
+        # command not found: report failure rather than crashing on the
+        # unbound 'p' below
+        return False
+    ret = p.wait()
+    s = p.stdout.read()
+    return (ignorestatus or not ret) and r.search(s)
 
 @check("baz", "GNU Arch baz client")
 def has_baz():
@@ -367,6 +419,33 @@
 def has_osx():
     return sys.platform == 'darwin'
 
+@check("docker", "docker support")
+def has_docker():
+    pat = r'A self-sufficient runtime for linux containers\.'
+    if matchoutput('docker --help', pat):
+        if 'linux' not in sys.platform:
+            # TODO: in theory we should be able to test docker-based
+            # package creation on non-linux using boot2docker, but in
+            # practice that requires extra coordination to make sure
+            # $TESTTEMP is going to be visible at the same path to the
+            # boot2docker VM. If we figure out how to verify that, we
+            # can use the following instead of just saying False:
+            # return 'DOCKER_HOST' in os.environ
+            return False
+
+        return True
+    return False
+
+@check("debhelper", "debian packaging tools")
+def has_debhelper():
+    dpkg = matchoutput('dpkg --version',
+                       "Debian `dpkg' package management program")
+    dh = matchoutput('dh --help',
+                     'dh is a part of debhelper.', ignorestatus=True)
+    dh_py2 = matchoutput('dh_python2 --help',
+                         'other supported Python versions')
+    return dpkg and dh and dh_py2
+
 @check("absimport", "absolute_import in __future__")
 def has_absimport():
     import __future__
@@ -380,3 +459,7 @@
 @check("pure", "running with pure Python code")
 def has_pure():
     return os.environ.get("HGTEST_RUN_TESTS_PURE") == "--pure"
+
+@check("slow", "allow slow tests")
+def has_slow():
+    return os.environ.get('HGTEST_SLOW') == 'slow'
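A sketch of the feature-check flow introduced above. It assumes tests/ is on sys.path, as it is for the hghave script, and the feature names are ones registered via @check in this module:

    import hghave
    hghave.require(['docker', 'no-osx'])
    # unknown features exit with status 2, missing or skipped features with 1,
    # otherwise execution simply continues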
--- a/tests/run-tests.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/run-tests.py	Tue Oct 20 15:59:10 2015 -0500
@@ -35,6 +35,8 @@
 #      ./run-tests.py -j2 -c --local test-s*  # unsupported (and broken)
 #  9) parallel, custom tmp dir:
 #      ./run-tests.py -j2 --tmpdir /tmp/myhgtests
+#  10) parallel, pure, tests that call run-tests:
+#      ./run-tests.py --pure `grep -l run-tests.py *.t`
 #
 # (You could use any subset of the tests: test-s* happens to match
 # enough that it's worth doing parallel runs, few enough that it
@@ -259,6 +261,8 @@
                       help='run tests in random order')
     parser.add_option('--profile-runner', action='store_true',
                       help='run statprof on run-tests')
+    parser.add_option('--allow-slow-tests', action='store_true',
+                      help='allow extremely slow tests')
 
     for option, (envvar, default) in defaults.items():
         defaults[option] = type(default)(os.environ.get(envvar, default))
@@ -661,7 +665,10 @@
             killdaemons(entry)
         self._daemonpids = []
 
-        if not self._keeptmpdir:
+        if self._keeptmpdir:
+            log('\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s' %
+                (self._testtmp, self._threadtmp))
+        else:
             shutil.rmtree(self._testtmp, True)
             shutil.rmtree(self._threadtmp, True)
 
@@ -1083,7 +1090,7 @@
             # clean up any optional leftovers
             while expected.get(pos, None):
                 el = expected[pos].pop(0)
-                if not el.endswith(" (?)\n"):
+                if not el.endswith(b" (?)\n"):
                     expected[pos].insert(0, el)
                     break
                 postout.append(b'  ' + el)
@@ -1153,7 +1160,7 @@
         if el == l: # perfect match (fast)
             return True
         if el:
-            if el.endswith(" (?)\n"):
+            if el.endswith(b" (?)\n"):
                 retry = "retry"
                 el = el[:-5] + "\n"
             if el.endswith(b" (esc)\n"):
@@ -1835,6 +1842,11 @@
         if self.options.pure:
             os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure"
 
+        if self.options.allow_slow_tests:
+            os.environ["HGTEST_SLOW"] = "slow"
+        elif 'HGTEST_SLOW' in os.environ:
+            del os.environ['HGTEST_SLOW']
+
         self._coveragefile = os.path.join(self._testdir, b'.coverage')
 
         vlog("# Using TESTDIR", self._testdir)
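With the new option, a slow test can guard itself with a "#require slow" line at the top of its .t file (run-tests routes that through hghave's new "slow" feature, which checks HGTEST_SLOW) and is then exercised only when invoked as, for example (the test name is hypothetical):

    $ ./run-tests.py --allow-slow-tests test-hypothetical-slow.t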
@@ -2078,7 +2090,11 @@
         vlog("# Running", cmd)
         if os.system(cmd) == 0:
             if not self.options.verbose:
-                os.remove(installerrs)
+                try:
+                    os.remove(installerrs)
+                except OSError as e:
+                    if e.errno != errno.ENOENT:
+                        raise
         else:
             f = open(installerrs, 'rb')
             for line in f:
--- a/tests/test-abort-checkin.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-abort-checkin.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,7 +1,7 @@
   $ cat > abortcommit.py <<EOF
-  > from mercurial import util
+  > from mercurial import error
   > def hook(**args):
-  >     raise util.Abort("no commits allowed")
+  >     raise error.Abort("no commits allowed")
   > def reposetup(ui, repo):
   >     repo.ui.setconfig("hooks", "pretxncommit.nocommits", hook)
   > EOF
--- a/tests/test-add.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-add.t	Tue Oct 20 15:59:10 2015 -0500
@@ -97,8 +97,7 @@
 
   $ hg merge
   merging a
-  warning: conflicts during merge.
-  merging a incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
--- a/tests/test-addremove-similar.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-addremove-similar.t	Tue Oct 20 15:59:10 2015 -0500
@@ -69,7 +69,7 @@
 
   $ cd ..
 
-Issue1527: repeated addremove causes util.Abort
+Issue1527: repeated addremove causes Abort
 
   $ hg init rep3; cd rep3
   $ mkdir d
--- a/tests/test-annotate.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-annotate.t	Tue Oct 20 15:59:10 2015 -0500
@@ -299,7 +299,7 @@
 and its ancestor by overriding "repo._filecommit".
 
   $ cat > ../legacyrepo.py <<EOF
-  > from mercurial import node, util
+  > from mercurial import node, error
   > def reposetup(ui, repo):
   >     class legacyrepo(repo.__class__):
   >         def _filecommit(self, fctx, manifest1, manifest2,
@@ -312,12 +312,12 @@
   >             meta = {}
   >             copy = fctx.renamed()
   >             if copy and copy[0] != fname:
-  >                 raise util.Abort('copying is not supported')
+  >                 raise error.Abort('copying is not supported')
   >             if fparent2 != node.nullid:
   >                 changelist.append(fname)
   >                 return flog.add(text, meta, tr, linkrev,
   >                                 fparent1, fparent2)
-  >             raise util.Abort('only merging is supported')
+  >             raise error.Abort('only merging is supported')
   >     repo.__class__ = legacyrepo
   > EOF
 
--- a/tests/test-backout.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-backout.t	Tue Oct 20 15:59:10 2015 -0500
@@ -259,6 +259,95 @@
   line 2
   line 3
 
+Test visibility of in-memory dirstate changes outside transaction to
+external hook process
+
+  $ cat > $TESTTMP/checkvisibility.sh <<EOF
+  > echo "==== \$1:"
+  > hg parents --template "{rev}:{node|short}\n"
+  > echo "===="
+  > EOF
+
+"hg backout --merge REV1" at REV2 below implies steps below:
+
+(1) update to REV1 (REV2 => REV1)
+(2) revert by REV1^1
+(3) commit backing out revision (REV3)
+(4) update to REV2 (REV3 => REV2)
+(5) merge with REV3 (REV2 => REV2, REV3)
+
+== test visibility to external preupdate hook
+
+  $ hg update -q -C 2
+  $ hg --config extensions.strip= strip 3
+  saved backup bundle to * (glob)
+
+  $ cat >> .hg/hgrc <<EOF
+  > [hooks]
+  > preupdate.visibility = sh $TESTTMP/checkvisibility.sh preupdate
+  > EOF
+
+("-m" is needed to avoid writing dirstte changes out at other than
+invocation of the hook to be examined)
+
+  $ hg backout --merge -d '3 0' 1 --tool=true -m 'fixed comment'
+  ==== preupdate:
+  2:6ea3f2a197a2
+  ====
+  reverting a
+  created new head
+  changeset 3:d92a3f57f067 backs out changeset 1:5a50a024c182
+  ==== preupdate:
+  3:d92a3f57f067
+  ====
+  merging with changeset 3:d92a3f57f067
+  ==== preupdate:
+  2:6ea3f2a197a2
+  ====
+  merging a
+  0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+
+  $ cat >> .hg/hgrc <<EOF
+  > [hooks]
+  > preupdate.visibility =
+  > EOF
+
+== test visibility to external update hook
+
+  $ hg update -q -C 2
+  $ hg --config extensions.strip= strip 3
+  saved backup bundle to * (glob)
+
+  $ cat >> .hg/hgrc <<EOF
+  > [hooks]
+  > update.visibility = sh $TESTTMP/checkvisibility.sh update
+  > EOF
+
+  $ hg backout --merge -d '3 0' 1 --tool=true -m 'fixed comment'
+  ==== update:
+  1:5a50a024c182
+  ====
+  reverting a
+  created new head
+  changeset 3:d92a3f57f067 backs out changeset 1:5a50a024c182
+  ==== update:
+  2:6ea3f2a197a2
+  ====
+  merging with changeset 3:d92a3f57f067
+  merging a
+  ==== update:
+  2:6ea3f2a197a2
+  3:d92a3f57f067
+  ====
+  0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+
+  $ cat >> .hg/hgrc <<EOF
+  > [hooks]
+  > update.visibility =
+  > EOF
+
   $ cd ..
 
 backout should not back out subsequent changesets
@@ -593,6 +682,23 @@
   use 'hg resolve' to retry unresolved file merges
   [1]
   $ hg status
+  $ hg debugmergestate
+  * version 2 records
+  local: b71750c4b0fdf719734971e3ef90dbeab5919a2d
+  other: a30dd8addae3ce71b8667868478542bc417439e6
+  file: foo (state "u", hash 0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33)
+    local path: foo (flags "")
+    ancestor path: foo (node f89532f44c247a0e993d63e3a734dd781ab04708)
+    other path: foo (node f50039b486d6fa1a90ae51778388cad161f425ee)
+  $ mv .hg/merge/state2 .hg/merge/state2-moved
+  $ hg debugmergestate
+  * version 1 records
+  local: b71750c4b0fdf719734971e3ef90dbeab5919a2d
+  file: foo (state "u", hash 0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33)
+    local path: foo (flags "")
+    ancestor path: foo (node f89532f44c247a0e993d63e3a734dd781ab04708)
+    other path: foo (node not stored in v1 format)
+  $ mv .hg/merge/state2-moved .hg/merge/state2
   $ hg resolve -l  # still unresolved
   U foo
   $ hg summary
@@ -603,7 +709,7 @@
   update: (current)
   phases: 3 draft
   $ hg resolve --all --debug
-  picked tool 'internal:merge' for foo (binary False symlink False)
+  picked tool ':merge' for foo (binary False symlink False)
   merging foo
   my foo@b71750c4b0fd+ other foo@a30dd8addae3 ancestor foo@913609522437
    premerge successful
--- a/tests/test-bad-extension.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-bad-extension.t	Tue Oct 20 15:59:10 2015 -0500
@@ -9,31 +9,26 @@
   > badext2 =
   > EOF
 
-  $ hg -q help help
+  $ hg -q help help 2>&1 |grep extension
   *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow
   *** failed to import extension badext2: No module named badext2
-  hg help [-ec] [TOPIC]
-  
-  show help for a given topic or a help overview
 
 show traceback
 
-  $ hg -q help help --traceback 2>&1 | grep -v '^  '
+  $ hg -q help help --traceback 2>&1 | egrep ' extension|^Exception|Traceback|ImportError'
   *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow
   Traceback (most recent call last):
   Exception: bit bucket overflow
   *** failed to import extension badext2: No module named badext2
   Traceback (most recent call last):
   ImportError: No module named badext2
-  hg help [-ec] [TOPIC]
-  
-  show help for a given topic or a help overview
 
 show traceback for ImportError of hgext.name if debug is set
 (note that --debug option isn't applied yet when loading extensions)
 
-  $ hg help help --traceback --config ui.debug=True 2>&1 \
-  > | grep -v '^  ' | head -n10
+  $ (hg -q help help --traceback --config ui.debug=True 2>&1) \
+  > | grep -v '^ ' \
+  > | egrep 'extension..[^p]|^Exception|Traceback|ImportError|not import'
   *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow
   Traceback (most recent call last):
   Exception: bit bucket overflow
@@ -43,4 +38,3 @@
   *** failed to import extension badext2: No module named badext2
   Traceback (most recent call last):
   ImportError: No module named badext2
-  hg help [-ec] [TOPIC]
--- a/tests/test-batching.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-batching.py	Tue Oct 20 15:59:10 2015 -0500
@@ -5,7 +5,8 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from mercurial.wireproto import localbatch, remotebatch, batchable, future
+from mercurial.peer import localbatch, batchable, future
+from mercurial.wireproto import remotebatch
 
 # equivalent of repo.repository
 class thing(object):
--- a/tests/test-blackbox.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-blackbox.t	Tue Oct 20 15:59:10 2015 -0500
@@ -13,8 +13,8 @@
   $ echo a > a
   $ hg add a
   $ hg blackbox
-  1970/01/01 00:00:00 bob> add a
-  1970/01/01 00:00:00 bob> add a exited 0 after * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> add a (glob)
+  1970/01/01 00:00:00 bob (*)> add a exited 0 after * seconds (glob)
 
 incoming change tracking
 
@@ -44,11 +44,11 @@
   added 1 changesets with 1 changes to 1 files
   (run 'hg update' to get a working copy)
   $ hg blackbox -l 5
-  1970/01/01 00:00:00 bob> pull
-  1970/01/01 00:00:00 bob> updated served branch cache in ?.???? seconds (glob)
-  1970/01/01 00:00:00 bob> wrote served branch cache with 1 labels and 2 nodes
-  1970/01/01 00:00:00 bob> 1 incoming changes - new heads: d02f48003e62
-  1970/01/01 00:00:00 bob> pull exited 0 after * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> pull (glob)
+  1970/01/01 00:00:00 bob (*)> updated served branch cache in ?.???? seconds (glob)
+  1970/01/01 00:00:00 bob (*)> wrote served branch cache with 1 labels and 2 nodes (glob)
+  1970/01/01 00:00:00 bob (*)> 1 incoming changes - new heads: d02f48003e62 (glob)
+  1970/01/01 00:00:00 bob (*)> pull exited 0 after * seconds (glob)
 
 we must not cause a failure if we cannot write to the log
 
@@ -106,11 +106,11 @@
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
   saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/*-backup.hg (glob)
   $ hg blackbox -l 5
-  1970/01/01 00:00:00 bob> strip tip
-  1970/01/01 00:00:00 bob> saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/*-backup.hg (glob)
-  1970/01/01 00:00:00 bob> updated base branch cache in ?.???? seconds (glob)
-  1970/01/01 00:00:00 bob> wrote base branch cache with 1 labels and 2 nodes
-  1970/01/01 00:00:00 bob> strip tip exited 0 after * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> strip tip (glob)
+  1970/01/01 00:00:00 bob (*)> saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/*-backup.hg (glob)
+  1970/01/01 00:00:00 bob (*)> updated base branch cache in ?.???? seconds (glob)
+  1970/01/01 00:00:00 bob (*)> wrote base branch cache with 1 labels and 2 nodes (glob)
+  1970/01/01 00:00:00 bob (*)> strip tip exited 0 after * seconds (glob)
 
 extension and python hooks - use the eol extension for a pythonhook
 
@@ -119,14 +119,14 @@
   $ echo '[hooks]' >> .hg/hgrc
   $ echo 'update = echo hooked' >> .hg/hgrc
   $ hg update
+  hooked
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  hooked
   $ hg blackbox -l 5
-  1970/01/01 00:00:00 bob> update
-  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 0 tags
-  1970/01/01 00:00:00 bob> pythonhook-preupdate: hgext.eol.preupdate finished in * seconds (glob)
-  1970/01/01 00:00:00 bob> exthook-update: echo hooked finished in * seconds (glob)
-  1970/01/01 00:00:00 bob> update exited 0 after * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> update (glob)
+  1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 0 tags (glob)
+  1970/01/01 00:00:00 bob (*)> pythonhook-preupdate: hgext.eol.preupdate finished in * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> exthook-update: echo hooked finished in * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> update exited 0 after * seconds (glob)
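The blackbox lines above show the log shape this test now expects: a date/time stamp, the user name, a parenthesized token that the test only globs (so its exact content is not pinned down here), a '>' separator, and the logged message. A small, hypothetical parser for that line shape, written against the sample output only and not part of the blackbox extension:

  import re

  # Sketch: split a blackbox line of the shape shown above, e.g.
  #   1970/01/01 00:00:00 bob (*)> pull exited 0 after * seconds
  # The parenthesized field is globbed in the test, so it is kept as an
  # opaque token here.
  BLACKBOX_LINE = re.compile(
      r'^(?P<date>\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2}) '
      r'(?P<user>\S+) '
      r'\((?P<token>[^)]*)\)> '
      r'(?P<message>.*)$')

  def parse_blackbox_line(line):
      m = BLACKBOX_LINE.match(line.rstrip('\n'))
      return m.groupdict() if m else None

  # parse_blackbox_line('1970/01/01 00:00:00 bob (1234)> add a')
  #   -> {'date': '1970/01/01 00:00:00', 'user': 'bob',
  #       'token': '1234', 'message': 'add a'}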
 
 log rotation
 
--- a/tests/test-bookmarks-pushpull.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-bookmarks-pushpull.t	Tue Oct 20 15:59:10 2015 -0500
@@ -539,7 +539,7 @@
   $ hg clone -U source repo1
 
 (test that incoming/outgoing exit with 1, if there is no bookmark to
-be excahnged)
+be exchanged)
 
   $ hg -R repo1 incoming -B
   comparing with $TESTTMP/bmcomparison/source
--- a/tests/test-bookmarks-rebase.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-bookmarks-rebase.t	Tue Oct 20 15:59:10 2015 -0500
@@ -79,8 +79,7 @@
   $ hg rebase -s three -d two
   rebasing 4:dd7c838e8362 "4" (tip three)
   merging d
-  warning: conflicts during merge.
-  merging d incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging d! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
   $ hg rebase --abort
@@ -95,8 +94,7 @@
   $ hg rebase -s three -d two
   rebasing 4:dd7c838e8362 "4" (tip three)
   merging d
-  warning: conflicts during merge.
-  merging d incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging d! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
   $ hg bookmark -d three
--- a/tests/test-bookmarks.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-bookmarks.t	Tue Oct 20 15:59:10 2015 -0500
@@ -135,6 +135,9 @@
   $ hg log -r 'bookmark(unknown)'
   abort: bookmark 'unknown' does not exist!
   [255]
+  $ hg log -r 'bookmark("literal:unknown")'
+  abort: bookmark 'unknown' does not exist!
+  [255]
   $ hg log -r 'bookmark("re:unknown")'
   abort: no bookmarks exist that match 'unknown'!
   [255]
@@ -511,10 +514,10 @@
 
 test clone with update to a bookmark
 
-  $ hg clone -u Z . cloned-bookmarks-update
+  $ hg clone -u Z . ../cloned-bookmarks-update
   updating to branch default
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  $ hg -R cloned-bookmarks-update bookmarks
+  $ hg -R ../cloned-bookmarks-update bookmarks
      X2                        1:925d80f479bb
      Y                         2:db815d6d32e6
    * Z                         2:db815d6d32e6
@@ -569,10 +572,40 @@
 
   $ hg bookmark -r3 Y
   moving bookmark 'Y' forward from db815d6d32e6
-  $ hg -R cloned-bookmarks-update update Y
+  $ cp -r  ../cloned-bookmarks-update ../cloned-bookmarks-manual-update
+
+(manual version)
+
+  $ hg -R ../cloned-bookmarks-manual-update update Y
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (activating bookmark Y)
-  $ hg -R cloned-bookmarks-update pull --update .
+  $ hg -R ../cloned-bookmarks-manual-update pull .
+  pulling from .
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files (+1 heads)
+  updating bookmark Y
+  updating bookmark Z
+  (run 'hg heads' to see heads, 'hg merge' to merge)
+
+(strange test, but it covers 'update --date' crashing when a bookmark has to move)
+
+  $ hg -R ../cloned-bookmarks-manual-update update -d 1986
+  abort: revision matching date not found
+  [255]
+  $ hg -R ../cloned-bookmarks-manual-update update
+  updating to active bookmark Y
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (activating bookmark Y)
+
+(all in one version)
+
+  $ hg -R ../cloned-bookmarks-update update Y
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (activating bookmark Y)
+  $ hg -R ../cloned-bookmarks-update pull --update .
   pulling from .
   searching for changes
   adding changesets
@@ -699,6 +732,19 @@
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (activating bookmark four)
 
+no-op update doesn't deactivate bookmarks
+
+  $ hg up
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg sum
+  parent: 3:9ba5f110a0b3 tip
+   y
+  branch: test
+  bookmarks: *four
+  commit: 2 unknown (clean)
+  update: (current)
+  phases: 4 draft
+
 test clearing divergent bookmarks of linear ancestors
 
   $ hg bookmark Z -r 0
--- a/tests/test-bundle-type.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-bundle-type.t	Tue Oct 20 15:59:10 2015 -0500
@@ -29,57 +29,63 @@
 
 test bundle types
 
-  $ for t in "None" "bzip2" "gzip"; do
+  $ for t in "None" "bzip2" "gzip" "none-v2" "v2" "v1" "gzip-v1"; do
   >   echo % test bundle type $t
   >   hg init t$t
   >   cd t1
   >   hg bundle -t $t ../b$t ../t$t
   >   cut -b 1-6 ../b$t | head -n 1
   >   cd ../t$t
-  >   hg pull ../b$t
-  >   hg up
-  >   hg log | grep summary
+  >   hg debugbundle ../b$t
+  >   echo
   >   cd ..
   > done
   % test bundle type None
   searching for changes
   1 changesets found
   HG10UN
-  pulling from ../bNone
-  requesting all changes
-  adding changesets
-  adding manifests
-  adding file changes
-  added 1 changesets with 1 changes to 1 files
-  (run 'hg update' to get a working copy)
-  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  summary:     a
+  c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+  
   % test bundle type bzip2
   searching for changes
   1 changesets found
   HG10BZ
-  pulling from ../bbzip2
-  requesting all changes
-  adding changesets
-  adding manifests
-  adding file changes
-  added 1 changesets with 1 changes to 1 files
-  (run 'hg update' to get a working copy)
-  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  summary:     a
+  c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+  
   % test bundle type gzip
   searching for changes
   1 changesets found
   HG10GZ
-  pulling from ../bgzip
-  requesting all changes
-  adding changesets
-  adding manifests
-  adding file changes
-  added 1 changesets with 1 changes to 1 files
-  (run 'hg update' to get a working copy)
-  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  summary:     a
+  c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+  
+  % test bundle type none-v2
+  searching for changes
+  1 changesets found
+  HG20\x00\x00 (esc)
+  Stream params: {}
+  changegroup -- "{'version': '02'}"
+      c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+  
+  % test bundle type v2
+  searching for changes
+  1 changesets found
+  HG20\x00\x00 (esc)
+  Stream params: {'Compression': 'BZ'}
+  changegroup -- "{'version': '02'}"
+      c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+  
+  % test bundle type v1
+  searching for changes
+  1 changesets found
+  HG10BZ
+  c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+  
+  % test bundle type gzip-v1
+  searching for changes
+  1 changesets found
+  HG10GZ
+  c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+  
 
 test garbage file
 
@@ -96,6 +102,7 @@
 
   $ cd t1
   $ hg bundle -a -t garbage ../bgarbage
-  abort: unknown bundle type specified with --type
+  abort: garbage is not a recognized bundle specification
+  (see "hg help bundle" for supported values for --type)
   [255]
   $ cd ..
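For reference, the six-byte headers printed by `cut -b 1-6` above identify the bundle format directly. The helper below is illustrative only (it is not used by the test) and covers just the magics that appear in this output:

  # Illustrative sketch: recognize the bundle headers shown above.
  def identify_bundle(path):
      with open(path, 'rb') as fh:
          magic = fh.read(6)
      if magic.startswith(b'HG20'):
          return 'bundle2 container (compression recorded in its stream params)'
      return {
          b'HG10UN': 'changegroup v1, uncompressed',
          b'HG10BZ': 'changegroup v1, bzip2 compressed',
          b'HG10GZ': 'changegroup v1, gzip compressed',
      }.get(magic, 'unknown header: %r' % magic)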
--- a/tests/test-bundle.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-bundle.t	Tue Oct 20 15:59:10 2015 -0500
@@ -250,6 +250,64 @@
   changegroup hook: HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=bundle:empty+full.hg (glob)
   (run 'hg heads' to see heads, 'hg merge' to merge)
 
+Cannot produce streaming clone bundles with "hg bundle"
+
+  $ hg -R test bundle -t packed1 packed.hg
+  abort: packed bundles cannot be produced by "hg bundle"
+  (use "hg debugcreatestreamclonebundle")
+  [255]
+
+packed1 is produced properly
+
+  $ hg -R test debugcreatestreamclonebundle packed.hg
+  writing 2608 bytes for 6 files
+  bundle requirements: revlogv1
+
+  $ f -B 64 --size --sha1 --hexdump packed.hg
+  packed.hg: size=2758, sha1=864c1c7b490bac9f2950ef5a660668378ac0524e
+  0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........|
+  0010: 00 00 00 00 0a 30 00 09 72 65 76 6c 6f 67 76 31 |.....0..revlogv1|
+  0020: 00 64 61 74 61 2f 61 64 69 66 66 65 72 65 6e 74 |.data/adifferent|
+  0030: 66 69 6c 65 2e 69 00 31 33 39 0a 00 01 00 01 00 |file.i.139......|
+
+generaldelta requirement is listed in stream clone bundles
+
+  $ hg --config format.generaldelta=true init testgd
+  $ cd testgd
+  $ touch foo
+  $ hg -q commit -A -m initial
+  $ cd ..
+  $ hg -R testgd debugcreatestreamclonebundle packedgd.hg
+  writing 301 bytes for 3 files
+  bundle requirements: generaldelta, revlogv1
+
+  $ f -B 64 --size --sha1 --hexdump packedgd.hg
+  packedgd.hg: size=396, sha1=981f9e589799335304a5a9a44caa3623a48d2a9f
+  0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........|
+  0010: 00 00 00 00 01 2d 00 16 67 65 6e 65 72 61 6c 64 |.....-..generald|
+  0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 76 31 00 64 61 |elta,revlogv1.da|
+  0030: 74 61 2f 66 6f 6f 2e 69 00 36 34 0a 00 03 00 01 |ta/foo.i.64.....|
+
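The two hexdumps above pin down the packed1 header well enough for a small reader: the 'HGS1UN' magic, an 8-byte big-endian file count, an 8-byte big-endian byte count, a 2-byte requirements length, and the requirements themselves as a comma-separated string. The sketch below is inferred from those dumps only and is not how Mercurial itself consumes these files (that is what debugapplystreamclonebundle is for):

  import struct

  def read_packed1_header(path):
      # Field layout inferred from the hexdumps above; illustrative only.
      with open(path, 'rb') as fh:
          magic = fh.read(6)                    # 'HGS1' + compression ('UN')
          if magic != b'HGS1UN':
              raise ValueError('not an uncompressed packed1 bundle: %r' % magic)
          filecount, bytecount = struct.unpack('>QQ', fh.read(16))
          (reqlen,) = struct.unpack('>H', fh.read(2))
          requirements = fh.read(reqlen).rstrip(b'\x00').decode('ascii')
      return filecount, bytecount, requirements.split(',')

  # read_packed1_header('packed.hg')   -> (6, 2608, ['revlogv1'])
  # read_packed1_header('packedgd.hg') -> (3, 301, ['generaldelta', 'revlogv1'])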
+Unpacking packed1 bundles with "hg unbundle" isn't allowed
+
+  $ hg init packed
+  $ hg -R packed unbundle packed.hg
+  abort: packed bundles cannot be applied with "hg unbundle"
+  (use "hg debugapplystreamclonebundle")
+  [255]
+
+packed1 can be consumed from debug command
+
+  $ hg -R packed debugapplystreamclonebundle packed.hg
+  6 files to transfer, 2.55 KB of data
+  transferred 2.55 KB in *.* seconds (*) (glob)
+
+Does not work on non-empty repo
+
+  $ hg -R packed debugapplystreamclonebundle packed.hg
+  abort: cannot apply stream clone bundle on non-empty repo
+  [255]
+
 Create partial clones
 
   $ rm -r empty
@@ -437,7 +495,7 @@
   abort: empty destination path is not valid
   [255]
 
-test for http://mercurial.selenic.com/bts/issue216
+test for https://bz.mercurial-scm.org/216
 
 Unbundle incremental bundles into fresh empty in one go
 
@@ -551,7 +609,7 @@
 
   $ cd ..
 
-test for http://mercurial.selenic.com/bts/issue1144
+test for https://bz.mercurial-scm.org/1144
 
 test that verify bundle does not traceback
 
--- a/tests/test-bundle2-exchange.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-bundle2-exchange.t	Tue Oct 20 15:59:10 2015 -0500
@@ -7,6 +7,7 @@
 
 enable obsolescence
 
+  $ cp $HGRCPATH $TESTTMP/hgrc.orig
   $ cat > $TESTTMP/bundle2-pushkey-hook.sh << EOF
   > echo pushkey: lock state after \"\$HG_NAMESPACE\"
   > hg debuglock
@@ -452,7 +453,7 @@
   > used to test error handling in bundle2
   > """
   > 
-  > from mercurial import util
+  > from mercurial import error
   > from mercurial import bundle2
   > from mercurial import exchange
   > from mercurial import extensions
@@ -470,7 +471,7 @@
   > 
   > @bundle2.parthandler("test:abort")
   > def handleabort(op, part):
-  >     raise util.Abort('Abandon ship!', hint="don't panic")
+  >     raise error.Abort('Abandon ship!', hint="don't panic")
   > 
   > def uisetup(ui):
   >     exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart
@@ -897,3 +898,47 @@
   abort: Clown phase push failed
   [255]
 
+Test lazily acquiring the lock during unbundle
+  $ cp $TESTTMP/hgrc.orig $HGRCPATH
+  $ cat >> $HGRCPATH <<EOF
+  > [ui]
+  > ssh=python "$TESTDIR/dummyssh"
+  > EOF
+
+  $ cat >> $TESTTMP/locktester.py <<EOF
+  > import os
+  > from mercurial import extensions, bundle2, util
+  > def checklock(orig, repo, *args, **kwargs):
+  >     if repo.svfs.lexists("lock"):
+  >         raise util.Abort("Lock should not be taken")
+  >     return orig(repo, *args, **kwargs)
+  > def extsetup(ui):
+  >    extensions.wrapfunction(bundle2, 'processbundle', checklock)
+  > EOF
+
+  $ hg init lazylock
+  $ cat >> lazylock/.hg/hgrc <<EOF
+  > [extensions]
+  > locktester=$TESTTMP/locktester.py
+  > EOF
+
+  $ hg clone -q ssh://user@dummy/lazylock lazylockclient
+  $ cd lazylockclient
+  $ touch a && hg ci -Aqm a
+  $ hg push
+  pushing to ssh://user@dummy/lazylock
+  searching for changes
+  abort: Lock should not be taken
+  [255]
+
+  $ cat >> ../lazylock/.hg/hgrc <<EOF
+  > [experimental]
+  > bundle2lazylocking=True
+  > EOF
+  $ hg push
+  pushing to ssh://user@dummy/lazylock
+  searching for changes
+  remote: adding changesets
+  remote: adding manifests
+  remote: adding file changes
+  remote: added 1 changesets with 1 changes to 1 files
--- a/tests/test-bundle2-format.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-bundle2-format.t	Tue Oct 20 15:59:10 2015 -0500
@@ -78,7 +78,9 @@
   >           ('', 'reply', False, 'produce a reply bundle'),
   >           ('', 'pushrace', False, 'includes a check:head part with unknown nodes'),
   >           ('', 'genraise', False, 'includes a part that raise an exception during generation'),
-  >           ('r', 'rev', [], 'includes those changeset in the bundle'),],
+  >           ('', 'timeout', False, 'emulate a timeout during bundle generation'),
+  >           ('r', 'rev', [], 'includes those changeset in the bundle'),
+  >           ('', 'compress', '', 'compress the stream'),],
   >          '[OUTPUTFILE]')
   > def cmdbundle2(ui, repo, path=None, **opts):
   >     """write a bundle2 container on standard output"""
@@ -88,7 +90,10 @@
   >         try:
   >             bundler.addparam(*p)
   >         except ValueError, exc:
-  >             raise util.Abort('%s' % exc)
+  >             raise error.Abort('%s' % exc)
+  > 
+  >     if opts['compress']:
+  >         bundler.setcompression(opts['compress'])
   > 
   >     if opts['reply']:
   >         capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
@@ -143,11 +148,23 @@
   >     else:
   >         file = open(path, 'wb')
   > 
+  >     if opts['timeout']:
+  >         bundler.newpart('test:song', data=ELEPHANTSSONG, mandatory=False)
+  >         for idx, junk in enumerate(bundler.getchunks()):
+  >             ui.write('%d chunk\n' % idx)
+  >             if idx > 4:
+  >                 # This throws a GeneratorExit inside the generator, which
+  >                 # can cause problems if the exception-recovery code is
+  >                 # too zealous. It's important for this test that the break
+  >                 # occur while we're in the middle of a part.
+  >                 break
+  >         ui.write('fake timeout complete.\n')
+  >         return
   >     try:
   >         for chunk in bundler.getchunks():
   >             file.write(chunk)
   >     except RuntimeError, exc:
-  >         raise util.Abort(exc)
+  >         raise error.Abort(exc)
   > 
   > @command('unbundle2', [], '')
   > def cmdunbundle2(ui, repo, replypath=None):
@@ -161,9 +178,9 @@
   >             op = bundle2.processbundle(repo, unbundler, lambda: tr)
   >             tr.close()
   >         except error.BundleValueError, exc:
-  >             raise util.Abort('missing support for %s' % exc)
+  >             raise error.Abort('missing support for %s' % exc)
   >         except error.PushRaced, exc:
-  >             raise util.Abort('push race: %s' % exc)
+  >             raise error.Abort('push race: %s' % exc)
   >     finally:
   >         if tr is not None:
   >             tr.release()
@@ -187,7 +204,7 @@
   >     try:
   >         params = unbundler.params
   >     except error.BundleValueError, exc:
-  >        raise util.Abort('unknown parameters: %s' % exc)
+  >        raise error.Abort('unknown parameters: %s' % exc)
   >     ui.write('options count: %i\n' % len(params))
   >     for key in sorted(params):
   >         ui.write('- %s\n' % key)
@@ -236,8 +253,29 @@
 
 Test bundling
 
-  $ hg bundle2
-  HG20\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
+  $ hg bundle2 | f --hexdump
+  
+  0000: 48 47 32 30 00 00 00 00 00 00 00 00             |HG20........|
+
+Test timeouts during bundling
+  $ hg bundle2 --timeout --debug --config devel.bundle2.debug=yes
+  bundle2-output-bundle: "HG20", 1 parts total
+  bundle2-output: start emission of HG20 stream
+  0 chunk
+  bundle2-output: bundle parameter: 
+  1 chunk
+  bundle2-output: start of parts
+  bundle2-output: bundle part: "test:song"
+  bundle2-output-part: "test:song" (advisory) 178 bytes payload
+  bundle2-output: part 0: "test:song"
+  bundle2-output: header chunk size: 16
+  2 chunk
+  3 chunk
+  bundle2-output: payload chunk size: 178
+  4 chunk
+  5 chunk
+  bundle2-generatorexit
+  fake timeout complete.
 
 Test unbundling
 
@@ -247,7 +285,7 @@
 
 Test old style bundles are detected and refused
 
-  $ hg bundle --all ../bundle.hg
+  $ hg bundle --all --type v1 ../bundle.hg
   1 changesets found
   $ hg statbundle2 < ../bundle.hg
   abort: unknown bundle version 10
@@ -266,8 +304,10 @@
 
 Test generation simple option
 
-  $ hg bundle2 --param 'caution'
-  HG20\x00\x00\x00\x07caution\x00\x00\x00\x00 (no-eol) (esc)
+  $ hg bundle2 --param 'caution' | f --hexdump
+  
+  0000: 48 47 32 30 00 00 00 07 63 61 75 74 69 6f 6e 00 |HG20....caution.|
+  0010: 00 00 00                                        |...|
 
 Test unbundling
 
@@ -278,8 +318,10 @@
 
 Test generation multiple option
 
-  $ hg bundle2 --param 'caution' --param 'meal'
-  HG20\x00\x00\x00\x0ccaution meal\x00\x00\x00\x00 (no-eol) (esc)
+  $ hg bundle2 --param 'caution' --param 'meal' | f --hexdump
+  
+  0000: 48 47 32 30 00 00 00 0c 63 61 75 74 69 6f 6e 20 |HG20....caution |
+  0010: 6d 65 61 6c 00 00 00 00                         |meal....|
 
 Test unbundling
 
@@ -294,8 +336,11 @@
 
 Test generation
 
-  $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants'
-  HG20\x00\x00\x00\x1ccaution meal=vegan elephants\x00\x00\x00\x00 (no-eol) (esc)
+  $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | f --hexdump
+  
+  0000: 48 47 32 30 00 00 00 1c 63 61 75 74 69 6f 6e 20 |HG20....caution |
+  0010: 6d 65 61 6c 3d 76 65 67 61 6e 20 65 6c 65 70 68 |meal=vegan eleph|
+  0020: 61 6e 74 73 00 00 00 00                         |ants....|
 
 Test unbundling
 
@@ -312,8 +357,12 @@
 
 Test generation
 
-  $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple
-  HG20\x00\x00\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00\x00\x00 (no-eol) (esc)
+  $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | f --hexdump
+  
+  0000: 48 47 32 30 00 00 00 29 65 25 37 43 25 32 31 25 |HG20...)e%7C%21%|
+  0010: 32 30 37 2f 3d 62 61 62 61 72 25 32 35 25 32 33 |207/=babar%25%23|
+  0020: 25 33 44 25 33 44 74 75 74 75 20 73 69 6d 70 6c |%3D%3Dtutu simpl|
+  0030: 65 00 00 00 00                                  |e....|
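The dump just above also spells out the stream-level parameter encoding: the 'HG20' magic, a 4-byte big-endian length, then that many bytes of space-separated, URL-quoted key[=value] pairs ('e%7C%21%207/' decodes to 'e|! 7/', 'babar%25%23%3D%3Dtutu' to 'babar%#==tutu'). A minimal reader for just that prefix, written against these dumps rather than Mercurial's API (the read_bundle2_params name is invented for the sketch):

  import struct
  try:
      from urllib.parse import unquote          # Python 3
  except ImportError:
      from urllib import unquote                # Python 2

  def read_bundle2_params(path):
      # Sketch: HG20 magic, 4-byte big-endian length, URL-quoted parameters.
      with open(path, 'rb') as fh:
          if fh.read(4) != b'HG20':
              raise ValueError('not a bundle2 stream')
          (plen,) = struct.unpack('>I', fh.read(4))
          blob = fh.read(plen).decode('ascii')
      params = {}
      for item in (blob.split(' ') if blob else []):
          key, _sep, value = item.partition('=')
          params[unquote(key)] = unquote(value)
      return params

  # For the bundle shown above: {'e|! 7/': 'babar%#==tutu', 'simple': ''}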
 
 Test unbundling
 
@@ -345,8 +394,12 @@
 
 file content is ok
 
-  $ cat ../out.hg2
-  HG20\x00\x00\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00\x00\x00 (no-eol) (esc)
+  $ f --hexdump ../out.hg2
+  ../out.hg2:
+  0000: 48 47 32 30 00 00 00 29 65 25 37 43 25 32 31 25 |HG20...)e%7C%21%|
+  0010: 32 30 37 2f 3d 62 61 62 61 72 25 32 35 25 32 33 |207/=babar%25%23|
+  0020: 25 33 44 25 33 44 74 75 74 75 20 73 69 6d 70 6c |%3D%3Dtutu simpl|
+  0030: 65 00 00 00 00                                  |e....|
 
 unbundling debug
 
@@ -428,12 +481,34 @@
   bundle2-output: closing payload chunk
   bundle2-output: end of bundle
 
-  $ cat ../parts.hg2
-  HG20\x00\x00\x00\x00\x00\x00\x00\x11 (esc)
-  test:empty\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11 (esc)
-  test:empty\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10	test:song\x00\x00\x00\x02\x00\x00\x00\x00\x00\xb2Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko (esc)
-  Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
-  Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.\x00\x00\x00\x00\x00\x00\x00\x16\x0ftest:debugreply\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00+	test:math\x00\x00\x00\x04\x02\x01\x02\x04\x01\x04\x07\x03pi3.14e2.72cookingraw\x00\x00\x00\x0242\x00\x00\x00\x00\x00\x00\x00\x1d	test:song\x00\x00\x00\x05\x01\x00\x0b\x00randomparam\x00\x00\x00\x00\x00\x00\x00\x10	test:ping\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
+  $ f --hexdump ../parts.hg2
+  ../parts.hg2:
+  0000: 48 47 32 30 00 00 00 00 00 00 00 11 0a 74 65 73 |HG20.........tes|
+  0010: 74 3a 65 6d 70 74 79 00 00 00 00 00 00 00 00 00 |t:empty.........|
+  0020: 00 00 00 00 11 0a 74 65 73 74 3a 65 6d 70 74 79 |......test:empty|
+  0030: 00 00 00 01 00 00 00 00 00 00 00 00 00 10 09 74 |...............t|
+  0040: 65 73 74 3a 73 6f 6e 67 00 00 00 02 00 00 00 00 |est:song........|
+  0050: 00 b2 50 61 74 61 6c 69 20 44 69 72 61 70 61 74 |..Patali Dirapat|
+  0060: 61 2c 20 43 72 6f 6d 64 61 20 43 72 6f 6d 64 61 |a, Cromda Cromda|
+  0070: 20 52 69 70 61 6c 6f 2c 20 50 61 74 61 20 50 61 | Ripalo, Pata Pa|
+  0080: 74 61 2c 20 4b 6f 20 4b 6f 20 4b 6f 0a 42 6f 6b |ta, Ko Ko Ko.Bok|
+  0090: 6f 72 6f 20 44 69 70 6f 75 6c 69 74 6f 2c 20 52 |oro Dipoulito, R|
+  00a0: 6f 6e 64 69 20 52 6f 6e 64 69 20 50 65 70 69 6e |ondi Rondi Pepin|
+  00b0: 6f 2c 20 50 61 74 61 20 50 61 74 61 2c 20 4b 6f |o, Pata Pata, Ko|
+  00c0: 20 4b 6f 20 4b 6f 0a 45 6d 61 6e 61 20 4b 61 72 | Ko Ko.Emana Kar|
+  00d0: 61 73 73 6f 6c 69 2c 20 4c 6f 75 63 72 61 20 4c |assoli, Loucra L|
+  00e0: 6f 75 63 72 61 20 50 6f 6e 70 6f 6e 74 6f 2c 20 |oucra Ponponto, |
+  00f0: 50 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f |Pata Pata, Ko Ko|
+  0100: 20 4b 6f 2e 00 00 00 00 00 00 00 16 0f 74 65 73 | Ko..........tes|
+  0110: 74 3a 64 65 62 75 67 72 65 70 6c 79 00 00 00 03 |t:debugreply....|
+  0120: 00 00 00 00 00 00 00 00 00 2b 09 74 65 73 74 3a |.........+.test:|
+  0130: 6d 61 74 68 00 00 00 04 02 01 02 04 01 04 07 03 |math............|
+  0140: 70 69 33 2e 31 34 65 32 2e 37 32 63 6f 6f 6b 69 |pi3.14e2.72cooki|
+  0150: 6e 67 72 61 77 00 00 00 02 34 32 00 00 00 00 00 |ngraw....42.....|
+  0160: 00 00 1d 09 74 65 73 74 3a 73 6f 6e 67 00 00 00 |....test:song...|
+  0170: 05 01 00 0b 00 72 61 6e 64 6f 6d 70 61 72 61 6d |.....randomparam|
+  0180: 00 00 00 00 00 00 00 10 09 74 65 73 74 3a 70 69 |.........test:pi|
+  0190: 6e 67 00 00 00 06 00 00 00 00 00 00 00 00 00 00 |ng..............|
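The same dump exposes the per-part framing: a 4-byte big-endian header size (zero ends the bundle), then a header holding a one-byte type length, the part type, a 4-byte part id, one-byte mandatory and advisory parameter counts plus the parameter data, followed by payload chunks, each prefixed with a 4-byte size and terminated by a zero-sized chunk. A compact walker over an uncompressed bundle2 file, reconstructed from this hexdump; real consumers should go through Mercurial's bundle2 module instead:

  import struct

  def list_parts(path):
      # Reconstructed from the ../parts.hg2 hexdump above; illustrative only.
      parts = []
      with open(path, 'rb') as fh:
          if fh.read(4) != b'HG20':
              raise ValueError('not a bundle2 stream')
          (plen,) = struct.unpack('>I', fh.read(4))
          fh.read(plen)                          # skip stream parameters
          while True:
              (hsize,) = struct.unpack('>I', fh.read(4))
              if hsize == 0:                     # end-of-bundle marker
                  break
              header = fh.read(hsize)
              (tlen,) = struct.unpack('>B', header[0:1])
              parttype = header[1:1 + tlen].decode('ascii')
              (partid,) = struct.unpack('>I', header[1 + tlen:5 + tlen])
              payload = 0
              while True:                        # payload chunks
                  (csize,) = struct.unpack('>i', fh.read(4))
                  if csize <= 0:                 # zero-sized chunk ends the payload
                      break
                  fh.read(csize)
                  payload += csize
              parts.append((partid, parttype, payload))
      return parts

  # list_parts('../parts.hg2') -> [(0, 'test:empty', 0), (1, 'test:empty', 0),
  #                                (2, 'test:song', 178), ...]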
 
 
   $ hg statbundle2 < ../parts.hg2
@@ -654,21 +729,49 @@
 
 The reply is a bundle
 
-  $ cat ../reply.hg2
-  HG20\x00\x00\x00\x00\x00\x00\x00\x1b\x06output\x00\x00\x00\x00\x00\x01\x0b\x01in-reply-to3\x00\x00\x00\xd9The choir starts singing: (esc)
-      Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
-      Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
-      Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
-  \x00\x00\x00\x00\x00\x00\x00\x1b\x06output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to4\x00\x00\x00\xc9debugreply: capabilities: (esc)
-  debugreply:     'city=!'
-  debugreply:         'celeste,ville'
-  debugreply:     'elephants'
-  debugreply:         'babar'
-  debugreply:         'celeste'
-  debugreply:     'ping-pong'
-  \x00\x00\x00\x00\x00\x00\x00\x1e	test:pong\x00\x00\x00\x02\x01\x00\x0b\x01in-reply-to7\x00\x00\x00\x00\x00\x00\x00\x1b\x06output\x00\x00\x00\x03\x00\x01\x0b\x01in-reply-to7\x00\x00\x00=received ping request (id 7) (esc)
-  replying to ping request (id 7)
-  \x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
+  $ f --hexdump ../reply.hg2
+  ../reply.hg2:
+  0000: 48 47 32 30 00 00 00 00 00 00 00 1b 06 6f 75 74 |HG20.........out|
+  0010: 70 75 74 00 00 00 00 00 01 0b 01 69 6e 2d 72 65 |put........in-re|
+  0020: 70 6c 79 2d 74 6f 33 00 00 00 d9 54 68 65 20 63 |ply-to3....The c|
+  0030: 68 6f 69 72 20 73 74 61 72 74 73 20 73 69 6e 67 |hoir starts sing|
+  0040: 69 6e 67 3a 0a 20 20 20 20 50 61 74 61 6c 69 20 |ing:.    Patali |
+  0050: 44 69 72 61 70 61 74 61 2c 20 43 72 6f 6d 64 61 |Dirapata, Cromda|
+  0060: 20 43 72 6f 6d 64 61 20 52 69 70 61 6c 6f 2c 20 | Cromda Ripalo, |
+  0070: 50 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f |Pata Pata, Ko Ko|
+  0080: 20 4b 6f 0a 20 20 20 20 42 6f 6b 6f 72 6f 20 44 | Ko.    Bokoro D|
+  0090: 69 70 6f 75 6c 69 74 6f 2c 20 52 6f 6e 64 69 20 |ipoulito, Rondi |
+  00a0: 52 6f 6e 64 69 20 50 65 70 69 6e 6f 2c 20 50 61 |Rondi Pepino, Pa|
+  00b0: 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f 20 4b |ta Pata, Ko Ko K|
+  00c0: 6f 0a 20 20 20 20 45 6d 61 6e 61 20 4b 61 72 61 |o.    Emana Kara|
+  00d0: 73 73 6f 6c 69 2c 20 4c 6f 75 63 72 61 20 4c 6f |ssoli, Loucra Lo|
+  00e0: 75 63 72 61 20 50 6f 6e 70 6f 6e 74 6f 2c 20 50 |ucra Ponponto, P|
+  00f0: 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f 20 |ata Pata, Ko Ko |
+  0100: 4b 6f 2e 0a 00 00 00 00 00 00 00 1b 06 6f 75 74 |Ko...........out|
+  0110: 70 75 74 00 00 00 01 00 01 0b 01 69 6e 2d 72 65 |put........in-re|
+  0120: 70 6c 79 2d 74 6f 34 00 00 00 c9 64 65 62 75 67 |ply-to4....debug|
+  0130: 72 65 70 6c 79 3a 20 63 61 70 61 62 69 6c 69 74 |reply: capabilit|
+  0140: 69 65 73 3a 0a 64 65 62 75 67 72 65 70 6c 79 3a |ies:.debugreply:|
+  0150: 20 20 20 20 20 27 63 69 74 79 3d 21 27 0a 64 65 |     'city=!'.de|
+  0160: 62 75 67 72 65 70 6c 79 3a 20 20 20 20 20 20 20 |bugreply:       |
+  0170: 20 20 27 63 65 6c 65 73 74 65 2c 76 69 6c 6c 65 |  'celeste,ville|
+  0180: 27 0a 64 65 62 75 67 72 65 70 6c 79 3a 20 20 20 |'.debugreply:   |
+  0190: 20 20 27 65 6c 65 70 68 61 6e 74 73 27 0a 64 65 |  'elephants'.de|
+  01a0: 62 75 67 72 65 70 6c 79 3a 20 20 20 20 20 20 20 |bugreply:       |
+  01b0: 20 20 27 62 61 62 61 72 27 0a 64 65 62 75 67 72 |  'babar'.debugr|
+  01c0: 65 70 6c 79 3a 20 20 20 20 20 20 20 20 20 27 63 |eply:         'c|
+  01d0: 65 6c 65 73 74 65 27 0a 64 65 62 75 67 72 65 70 |eleste'.debugrep|
+  01e0: 6c 79 3a 20 20 20 20 20 27 70 69 6e 67 2d 70 6f |ly:     'ping-po|
+  01f0: 6e 67 27 0a 00 00 00 00 00 00 00 1e 09 74 65 73 |ng'..........tes|
+  0200: 74 3a 70 6f 6e 67 00 00 00 02 01 00 0b 01 69 6e |t:pong........in|
+  0210: 2d 72 65 70 6c 79 2d 74 6f 37 00 00 00 00 00 00 |-reply-to7......|
+  0220: 00 1b 06 6f 75 74 70 75 74 00 00 00 03 00 01 0b |...output.......|
+  0230: 01 69 6e 2d 72 65 70 6c 79 2d 74 6f 37 00 00 00 |.in-reply-to7...|
+  0240: 3d 72 65 63 65 69 76 65 64 20 70 69 6e 67 20 72 |=received ping r|
+  0250: 65 71 75 65 73 74 20 28 69 64 20 37 29 0a 72 65 |equest (id 7).re|
+  0260: 70 6c 79 69 6e 67 20 74 6f 20 70 69 6e 67 20 72 |plying to ping r|
+  0270: 65 71 75 65 73 74 20 28 69 64 20 37 29 0a 00 00 |equest (id 7)...|
+  0280: 00 00 00 00 00 00                               |......|
 
 The reply is valid
 
@@ -779,29 +882,108 @@
   bundle2-output: closing payload chunk
   bundle2-output: end of bundle
 
-  $ cat ../rev.hg2
-  HG20\x00\x00\x00\x00\x00\x00\x00\x12\x0bchangegroup\x00\x00\x00\x00\x00\x00\x00\x00\x06\x13\x00\x00\x00\xa42\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j_\xdd\xd9\x89W\xc8\xa5JMCm\xfe\x1d\xa9\xd8\x7f!\xa1\xb9{\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)6e1f4c47ecb533ffd0c8e52cdc88afb6cd39e20c (esc)
-  \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02D (esc)
-  \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01D\x00\x00\x00\xa4\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xcd\x01\x0b\x8c\xd9\x98\xf3\x98\x1aZ\x81\x15\xf9O\x8d\xa4\xabP`\x89\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)4dece9c826f69490507b98c6383a3009b295837d (esc)
-  \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02E (esc)
-  \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01E\x00\x00\x00\xa2\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)365b93d57fdf4814e2b5911d6bacff2b12014441 (esc)
-  \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x00\x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01G\x00\x00\x00\xa4\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
-  \x87\xcd\xc9n\x8e\xaa\xb6$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
-  \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)8bee48edc7318541fc0013ee41b089276a8c24bf (esc)
-  \x00\x00\x00f\x00\x00\x00f\x00\x00\x00\x02H (esc)
-  \x00\x00\x00g\x00\x00\x00h\x00\x00\x00\x01H\x00\x00\x00\x00\x00\x00\x00\x8bn\x1fLG\xec\xb53\xff\xd0\xc8\xe5,\xdc\x88\xaf\xb6\xcd9\xe2\x0cf\xa5\xa0\x18\x17\xfd\xf5#\x9c'8\x02\xb5\xb7a\x8d\x05\x1c\x89\xe4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+D\x00c3f1ca2924c16a19b0656a84900e504e5b0aec2d (esc)
-  \x00\x00\x00\x8bM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0	\xb2\x95\x83}\x00}\x8c\x9d\x88\x84\x13%\xf5\xc6\xb0cq\xb3[N\x8a+\x1a\x83\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00+\x00\x00\x00\xac\x00\x00\x00+E\x009c6fd0350a6c0d0c49d4a9c5017cf07043f54e58 (esc)
-  \x00\x00\x00\x8b6[\x93\xd5\x7f\xdfH\x14\xe2\xb5\x91\x1dk\xac\xff+\x12\x01DA(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xceM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0	\xb2\x95\x83}\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00V\x00\x00\x00V\x00\x00\x00+F\x0022bfcfd62a21a3287edbd4d656218d0f525ed76a (esc)
-  \x00\x00\x00\x97\x8b\xeeH\xed\xc71\x85A\xfc\x00\x13\xeeA\xb0\x89'j\x8c$\xbf(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xce\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
-  \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00+\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+H\x008500189e74a9e0475e822093bc7db0d631aeb0b4 (esc)
-  \x00\x00\x00\x00\x00\x00\x00\x05D\x00\x00\x00b\xc3\xf1\xca)$\xc1j\x19\xb0ej\x84\x90\x0ePN[ (esc)
-  \xec-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02D (esc)
-  \x00\x00\x00\x00\x00\x00\x00\x05E\x00\x00\x00b\x9co\xd05 (esc)
-  l\r (no-eol) (esc)
-  \x0cI\xd4\xa9\xc5\x01|\xf0pC\xf5NX\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02E (esc)
-  \x00\x00\x00\x00\x00\x00\x00\x05H\x00\x00\x00b\x85\x00\x18\x9et\xa9\xe0G^\x82 \x93\xbc}\xb0\xd61\xae\xb0\xb4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
-  \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02H (esc)
-  \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
+  $ f --hexdump ../rev.hg2
+  ../rev.hg2:
+  0000: 48 47 32 30 00 00 00 00 00 00 00 12 0b 63 68 61 |HG20.........cha|
+  0010: 6e 67 65 67 72 6f 75 70 00 00 00 00 00 00 00 00 |ngegroup........|
+  0020: 06 13 00 00 00 a4 32 af 76 86 d4 03 cf 45 b5 d9 |......2.v....E..|
+  0030: 5f 2d 70 ce be a5 87 ac 80 6a 5f dd d9 89 57 c8 |_-p......j_...W.|
+  0040: a5 4a 4d 43 6d fe 1d a9 d8 7f 21 a1 b9 7b 00 00 |.JMCm.....!..{..|
+  0050: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
+  0060: 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f 2d 70 ce |..2.v....E.._-p.|
+  0070: be a5 87 ac 80 6a 00 00 00 00 00 00 00 29 00 00 |.....j.......)..|
+  0080: 00 29 36 65 31 66 34 63 34 37 65 63 62 35 33 33 |.)6e1f4c47ecb533|
+  0090: 66 66 64 30 63 38 65 35 32 63 64 63 38 38 61 66 |ffd0c8e52cdc88af|
+  00a0: 62 36 63 64 33 39 65 32 30 63 0a 00 00 00 66 00 |b6cd39e20c....f.|
+  00b0: 00 00 68 00 00 00 02 44 0a 00 00 00 69 00 00 00 |..h....D....i...|
+  00c0: 6a 00 00 00 01 44 00 00 00 a4 95 20 ee a7 81 bc |j....D..... ....|
+  00d0: ca 16 c1 e1 5a cc 0b a1 43 35 a0 e8 e5 ba cd 01 |....Z...C5......|
+  00e0: 0b 8c d9 98 f3 98 1a 5a 81 15 f9 4f 8d a4 ab 50 |.......Z...O...P|
+  00f0: 60 89 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |`...............|
+  0100: 00 00 00 00 00 00 95 20 ee a7 81 bc ca 16 c1 e1 |....... ........|
+  0110: 5a cc 0b a1 43 35 a0 e8 e5 ba 00 00 00 00 00 00 |Z...C5..........|
+  0120: 00 29 00 00 00 29 34 64 65 63 65 39 63 38 32 36 |.)...)4dece9c826|
+  0130: 66 36 39 34 39 30 35 30 37 62 39 38 63 36 33 38 |f69490507b98c638|
+  0140: 33 61 33 30 30 39 62 32 39 35 38 33 37 64 0a 00 |3a3009b295837d..|
+  0150: 00 00 66 00 00 00 68 00 00 00 02 45 0a 00 00 00 |..f...h....E....|
+  0160: 69 00 00 00 6a 00 00 00 01 45 00 00 00 a2 ee a1 |i...j....E......|
+  0170: 37 46 79 9a 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f |7Fy.......<...8.|
+  0180: 52 4f 24 b6 38 7c 8c 8c ae 37 17 88 80 f3 fa 95 |RO$.8|...7......|
+  0190: de d3 cb 1c f7 85 95 20 ee a7 81 bc ca 16 c1 e1 |....... ........|
+  01a0: 5a cc 0b a1 43 35 a0 e8 e5 ba ee a1 37 46 79 9a |Z...C5......7Fy.|
+  01b0: 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f 52 4f 00 00 |......<...8.RO..|
+  01c0: 00 00 00 00 00 29 00 00 00 29 33 36 35 62 39 33 |.....)...)365b93|
+  01d0: 64 35 37 66 64 66 34 38 31 34 65 32 62 35 39 31 |d57fdf4814e2b591|
+  01e0: 31 64 36 62 61 63 66 66 32 62 31 32 30 31 34 34 |1d6bacff2b120144|
+  01f0: 34 31 0a 00 00 00 66 00 00 00 68 00 00 00 00 00 |41....f...h.....|
+  0200: 00 00 69 00 00 00 6a 00 00 00 01 47 00 00 00 a4 |..i...j....G....|
+  0210: 02 de 42 19 6e be e4 2e f2 84 b6 78 0a 87 cd c9 |..B.n......x....|
+  0220: 6e 8e aa b6 24 b6 38 7c 8c 8c ae 37 17 88 80 f3 |n...$.8|...7....|
+  0230: fa 95 de d3 cb 1c f7 85 00 00 00 00 00 00 00 00 |................|
+  0240: 00 00 00 00 00 00 00 00 00 00 00 00 02 de 42 19 |..............B.|
+  0250: 6e be e4 2e f2 84 b6 78 0a 87 cd c9 6e 8e aa b6 |n......x....n...|
+  0260: 00 00 00 00 00 00 00 29 00 00 00 29 38 62 65 65 |.......)...)8bee|
+  0270: 34 38 65 64 63 37 33 31 38 35 34 31 66 63 30 30 |48edc7318541fc00|
+  0280: 31 33 65 65 34 31 62 30 38 39 32 37 36 61 38 63 |13ee41b089276a8c|
+  0290: 32 34 62 66 0a 00 00 00 66 00 00 00 66 00 00 00 |24bf....f...f...|
+  02a0: 02 48 0a 00 00 00 67 00 00 00 68 00 00 00 01 48 |.H....g...h....H|
+  02b0: 00 00 00 00 00 00 00 8b 6e 1f 4c 47 ec b5 33 ff |........n.LG..3.|
+  02c0: d0 c8 e5 2c dc 88 af b6 cd 39 e2 0c 66 a5 a0 18 |...,.....9..f...|
+  02d0: 17 fd f5 23 9c 27 38 02 b5 b7 61 8d 05 1c 89 e4 |...#.'8...a.....|
+  02e0: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
+  02f0: 00 00 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f 2d |....2.v....E.._-|
+  0300: 70 ce be a5 87 ac 80 6a 00 00 00 81 00 00 00 81 |p......j........|
+  0310: 00 00 00 2b 44 00 63 33 66 31 63 61 32 39 32 34 |...+D.c3f1ca2924|
+  0320: 63 31 36 61 31 39 62 30 36 35 36 61 38 34 39 30 |c16a19b0656a8490|
+  0330: 30 65 35 30 34 65 35 62 30 61 65 63 32 64 0a 00 |0e504e5b0aec2d..|
+  0340: 00 00 8b 4d ec e9 c8 26 f6 94 90 50 7b 98 c6 38 |...M...&...P{..8|
+  0350: 3a 30 09 b2 95 83 7d 00 7d 8c 9d 88 84 13 25 f5 |:0....}.}.....%.|
+  0360: c6 b0 63 71 b3 5b 4e 8a 2b 1a 83 00 00 00 00 00 |..cq.[N.+.......|
+  0370: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 95 |................|
+  0380: 20 ee a7 81 bc ca 16 c1 e1 5a cc 0b a1 43 35 a0 | ........Z...C5.|
+  0390: e8 e5 ba 00 00 00 2b 00 00 00 ac 00 00 00 2b 45 |......+.......+E|
+  03a0: 00 39 63 36 66 64 30 33 35 30 61 36 63 30 64 30 |.9c6fd0350a6c0d0|
+  03b0: 63 34 39 64 34 61 39 63 35 30 31 37 63 66 30 37 |c49d4a9c5017cf07|
+  03c0: 30 34 33 66 35 34 65 35 38 0a 00 00 00 8b 36 5b |043f54e58.....6[|
+  03d0: 93 d5 7f df 48 14 e2 b5 91 1d 6b ac ff 2b 12 01 |....H.....k..+..|
+  03e0: 44 41 28 a5 84 c6 5e f1 21 f8 9e b6 6a b7 d0 bc |DA(...^.!...j...|
+  03f0: 15 3d 80 99 e7 ce 4d ec e9 c8 26 f6 94 90 50 7b |.=....M...&...P{|
+  0400: 98 c6 38 3a 30 09 b2 95 83 7d ee a1 37 46 79 9a |..8:0....}..7Fy.|
+  0410: 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f 52 4f 00 00 |......<...8.RO..|
+  0420: 00 56 00 00 00 56 00 00 00 2b 46 00 32 32 62 66 |.V...V...+F.22bf|
+  0430: 63 66 64 36 32 61 32 31 61 33 32 38 37 65 64 62 |cfd62a21a3287edb|
+  0440: 64 34 64 36 35 36 32 31 38 64 30 66 35 32 35 65 |d4d656218d0f525e|
+  0450: 64 37 36 61 0a 00 00 00 97 8b ee 48 ed c7 31 85 |d76a.......H..1.|
+  0460: 41 fc 00 13 ee 41 b0 89 27 6a 8c 24 bf 28 a5 84 |A....A..'j.$.(..|
+  0470: c6 5e f1 21 f8 9e b6 6a b7 d0 bc 15 3d 80 99 e7 |.^.!...j....=...|
+  0480: ce 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
+  0490: 00 00 00 00 00 02 de 42 19 6e be e4 2e f2 84 b6 |.......B.n......|
+  04a0: 78 0a 87 cd c9 6e 8e aa b6 00 00 00 2b 00 00 00 |x....n......+...|
+  04b0: 56 00 00 00 00 00 00 00 81 00 00 00 81 00 00 00 |V...............|
+  04c0: 2b 48 00 38 35 30 30 31 38 39 65 37 34 61 39 65 |+H.8500189e74a9e|
+  04d0: 30 34 37 35 65 38 32 32 30 39 33 62 63 37 64 62 |0475e822093bc7db|
+  04e0: 30 64 36 33 31 61 65 62 30 62 34 0a 00 00 00 00 |0d631aeb0b4.....|
+  04f0: 00 00 00 05 44 00 00 00 62 c3 f1 ca 29 24 c1 6a |....D...b...)$.j|
+  0500: 19 b0 65 6a 84 90 0e 50 4e 5b 0a ec 2d 00 00 00 |..ej...PN[..-...|
+  0510: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
+  0520: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
+  0530: 00 00 00 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f |.....2.v....E.._|
+  0540: 2d 70 ce be a5 87 ac 80 6a 00 00 00 00 00 00 00 |-p......j.......|
+  0550: 00 00 00 00 02 44 0a 00 00 00 00 00 00 00 05 45 |.....D.........E|
+  0560: 00 00 00 62 9c 6f d0 35 0a 6c 0d 0c 49 d4 a9 c5 |...b.o.5.l..I...|
+  0570: 01 7c f0 70 43 f5 4e 58 00 00 00 00 00 00 00 00 |.|.pC.NX........|
+  0580: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
+  0590: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
+  05a0: 95 20 ee a7 81 bc ca 16 c1 e1 5a cc 0b a1 43 35 |. ........Z...C5|
+  05b0: a0 e8 e5 ba 00 00 00 00 00 00 00 00 00 00 00 02 |................|
+  05c0: 45 0a 00 00 00 00 00 00 00 05 48 00 00 00 62 85 |E.........H...b.|
+  05d0: 00 18 9e 74 a9 e0 47 5e 82 20 93 bc 7d b0 d6 31 |...t..G^. ..}..1|
+  05e0: ae b0 b4 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
+  05f0: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
+  0600: 00 00 00 00 00 00 00 00 00 00 00 02 de 42 19 6e |.............B.n|
+  0610: be e4 2e f2 84 b6 78 0a 87 cd c9 6e 8e aa b6 00 |......x....n....|
+  0620: 00 00 00 00 00 00 00 00 00 00 02 48 0a 00 00 00 |...........H....|
+  0630: 00 00 00 00 00 00 00 00 00 00 00 00 00          |.............|
 
   $ hg debugbundle ../rev.hg2
   Stream params: {}
@@ -815,6 +997,7 @@
   adding manifests
   adding file changes
   added 0 changesets with 0 changes to 3 files
+  (run 'hg update' to get a working copy)
 
 with reply
 
@@ -823,12 +1006,21 @@
   0 unread bytes
   addchangegroup return: 1
 
-  $ cat ../rev-reply.hg2
-  HG20\x00\x00\x00\x00\x00\x00\x00/\x11reply:changegroup\x00\x00\x00\x00\x00\x02\x0b\x01\x06\x01in-reply-to1return1\x00\x00\x00\x00\x00\x00\x00\x1b\x06output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to1\x00\x00\x00dadding changesets (esc)
-  adding manifests
-  adding file changes
-  added 0 changesets with 0 changes to 3 files
-  \x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
+  $ f --hexdump ../rev-reply.hg2
+  ../rev-reply.hg2:
+  0000: 48 47 32 30 00 00 00 00 00 00 00 2f 11 72 65 70 |HG20......./.rep|
+  0010: 6c 79 3a 63 68 61 6e 67 65 67 72 6f 75 70 00 00 |ly:changegroup..|
+  0020: 00 00 00 02 0b 01 06 01 69 6e 2d 72 65 70 6c 79 |........in-reply|
+  0030: 2d 74 6f 31 72 65 74 75 72 6e 31 00 00 00 00 00 |-to1return1.....|
+  0040: 00 00 1b 06 6f 75 74 70 75 74 00 00 00 01 00 01 |....output......|
+  0050: 0b 01 69 6e 2d 72 65 70 6c 79 2d 74 6f 31 00 00 |..in-reply-to1..|
+  0060: 00 64 61 64 64 69 6e 67 20 63 68 61 6e 67 65 73 |.dadding changes|
+  0070: 65 74 73 0a 61 64 64 69 6e 67 20 6d 61 6e 69 66 |ets.adding manif|
+  0080: 65 73 74 73 0a 61 64 64 69 6e 67 20 66 69 6c 65 |ests.adding file|
+  0090: 20 63 68 61 6e 67 65 73 0a 61 64 64 65 64 20 30 | changes.added 0|
+  00a0: 20 63 68 61 6e 67 65 73 65 74 73 20 77 69 74 68 | changesets with|
+  00b0: 20 30 20 63 68 61 6e 67 65 73 20 74 6f 20 33 20 | 0 changes to 3 |
+  00c0: 66 69 6c 65 73 0a 00 00 00 00 00 00 00 00       |files.........|
 
 Check handling of exception during generation.
 ----------------------------------------------
@@ -839,9 +1031,16 @@
 
 Should still be a valid bundle
 
-  $ cat ../genfailed.hg2
-  HG20\x00\x00\x00\x00\x00\x00\x00\r (no-eol) (esc)
-  \x06output\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00H\x0berror:abort\x00\x00\x00\x00\x01\x00\x07-messageunexpected error: Someone set up us the bomb!\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
+  $ f --hexdump ../genfailed.hg2
+  ../genfailed.hg2:
+  0000: 48 47 32 30 00 00 00 00 00 00 00 0d 06 6f 75 74 |HG20.........out|
+  0010: 70 75 74 00 00 00 00 00 00 ff ff ff ff 00 00 00 |put.............|
+  0020: 48 0b 65 72 72 6f 72 3a 61 62 6f 72 74 00 00 00 |H.error:abort...|
+  0030: 00 01 00 07 2d 6d 65 73 73 61 67 65 75 6e 65 78 |....-messageunex|
+  0040: 70 65 63 74 65 64 20 65 72 72 6f 72 3a 20 53 6f |pected error: So|
+  0050: 6d 65 6f 6e 65 20 73 65 74 20 75 70 20 75 73 20 |meone set up us |
+  0060: 74 68 65 20 62 6f 6d 62 21 00 00 00 00 00 00 00 |the bomb!.......|
+  0070: 00                                              |.|
 
 And its handling on the other side raises a clean exception
 
@@ -850,5 +1049,182 @@
   abort: unexpected error: Someone set up us the bomb!
   [255]
 
+Test compression
+================
+
+Simple case where it just works: GZ
+-----------------------------------
+
+  $ hg bundle2 --compress GZ --rev '8+7+5+4' ../rev.hg2.bz
+  $ f --hexdump ../rev.hg2.bz
+  ../rev.hg2.bz:
+  0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
+  0010: 69 6f 6e 3d 47 5a 78 9c 95 94 7d 68 95 55 1c c7 |ion=GZx...}h.U..|
+  0020: 9f 3b 31 e8 ce fa c3 65 be a0 a4 b4 52 b9 29 e7 |.;1....e....R.).|
+  0030: f5 79 ce 89 fa 63 ed 5e 77 8b 9c c3 3f 2a 1c 68 |.y...c.^w...?*.h|
+  0040: cf 79 9b dd 6a ae b0 28 74 b8 e5 96 5b bb 86 61 |.y..j..(t...[..a|
+  0050: a3 15 6e 3a 71 c8 6a e8 a5 da 95 64 28 22 ce 69 |..n:q.j....d(".i|
+  0060: cd 06 59 34 28 2b 51 2a 58 c3 17 56 2a 9a 9d 67 |..Y4(+Q*X..V*..g|
+  0070: dc c6 35 9e c4 1d f8 9e 87 f3 9c f3 3b bf 0f bf |..5.........;...|
+  0080: 97 e3 38 ce f4 42 b9 d6 af ae d2 55 af ae 7b ad |..8..B.....U..{.|
+  0090: c6 c9 8d bb 8a ec b4 07 ed 7f fd ed d3 53 be 4e |.............S.N|
+  00a0: f4 0e af 59 52 73 ea 50 d7 96 9e ba d4 9a 1f 87 |...YRs.P........|
+  00b0: 9b 9f 1d e8 7a 6a 79 e9 cb 7f cf eb fe 7e d3 82 |....zjy......~..|
+  00c0: ce 2f 36 38 21 23 cc 36 b7 b5 38 90 ab a1 21 92 |./68!#.6..8...!.|
+  00d0: 78 5a 0a 8a b1 31 0a 48 a6 29 92 4a 32 e6 1b e1 |xZ...1.H.).J2...|
+  00e0: 4a 85 b9 46 40 46 ed 61 63 b5 d6 aa 20 1e ac 5e |J..F@F.ac... ..^|
+  00f0: b0 0a ae 8a c4 03 c6 d6 f9 a3 7b eb fb 4e de 7f |..........{..N..|
+  0100: e4 97 55 5f 15 76 96 d2 5d bf 9d 3f 38 18 29 4c |..U_.v..]..?8.)L|
+  0110: 0f b7 5d 6e 9b b3 aa 7e c6 d5 15 5b f7 7c 52 f1 |..]n...~...[.|R.|
+  0120: 7c 73 18 63 98 6d 3e 23 51 5a 6a 2e 19 72 8d cb ||s.c.m>#QZj..r..|
+  0130: 09 07 14 78 82 33 e9 62 86 7d 0c 00 17 88 53 86 |...x.3.b.}....S.|
+  0140: 3d 75 0b 63 e2 16 c6 84 9d 76 8f 76 7a cb de fc |=u.c.....v.vz...|
+  0150: a8 a3 f0 46 d3 a5 f6 c7 96 b6 9f 60 3b 57 ae 28 |...F.......`;W.(|
+  0160: ce b2 8d e9 f4 3e 6f 66 53 dd e5 6b ad 67 be f9 |.....>ofS..k.g..|
+  0170: 72 ee 5f 8d 61 3c 61 b6 f9 8c d8 a5 82 63 45 3d |r._.a<a......cE=|
+  0180: a3 0c 61 90 68 24 28 87 50 b9 c2 97 c6 20 01 11 |..a.h$(.P.... ..|
+  0190: 80 84 10 98 cf e8 e4 13 96 05 51 2c 38 f3 c4 ec |..........Q,8...|
+  01a0: ea 43 e7 96 5e 6a c8 be 11 dd 32 78 a2 fa dd 8f |.C..^j....2x....|
+  01b0: b3 61 84 61 51 0c b3 cd 27 64 42 6b c2 b4 92 1e |.a.aQ...'dBk....|
+  01c0: 86 8c 12 68 24 00 10 db 7f 50 00 c6 91 e7 fa 4c |...h$....P.....L|
+  01d0: 22 22 cc bf 84 81 0a 92 c1 aa 2a c7 1b 49 e6 ee |""........*..I..|
+  01e0: 6b a9 7e e0 e9 b2 91 5e 7c 73 68 e0 fc 23 3f 34 |k.~....^|sh..#?4|
+  01f0: ed cf 0e f2 b3 d3 4c d7 ae 59 33 6f 8c 3d b8 63 |......L..Y3o.=.c|
+  0200: 21 2b e8 3d e0 6f 9d 3a b7 f9 dc 24 2a b2 3e a7 |!+.=.o.:...$*.>.|
+  0210: 58 dc 91 d8 40 e9 23 8e 88 84 ae 0f b9 00 2e b5 |X...@.#.........|
+  0220: 74 36 f3 40 53 40 34 15 c0 d7 12 8d e7 bb 65 f9 |t6.@S@4.......e.|
+  0230: c8 ef 03 0f ff f9 fe b6 8a 0d 6d fd ec 51 70 f7 |..........m..Qp.|
+  0240: a7 ad 9b 6b 9d da 74 7b 53 43 d1 43 63 fd 19 f9 |...k..t{SC.Cc...|
+  0250: ca 67 95 e5 ef c4 e6 6c 9e 44 e1 c5 ac 7a 82 6f |.g.....l.D...z.o|
+  0260: c2 e1 d2 b5 2d 81 29 f0 5d 09 6c 6f 10 ae 88 cf |....-.).].lo....|
+  0270: 25 05 d0 93 06 78 80 60 43 2d 10 1b 47 71 2b b7 |%....x.`C-..Gq+.|
+  0280: 7f bb e9 a7 e4 7d 67 7b df 9b f7 62 cf cd d8 f4 |.....}g{...b....|
+  0290: 48 bc 64 51 57 43 ff ea 8b 0b ae 74 64 53 07 86 |H.dQWC.....tdS..|
+  02a0: fa 66 3c 5e f7 e1 af a7 c2 90 ff a7 be 9e c9 29 |.f<^...........)|
+  02b0: b6 cc 41 48 18 69 94 8b 7c 04 7d 8c 98 a7 95 50 |..AH.i..|.}....P|
+  02c0: 44 d9 d0 20 c8 14 30 14 51 ad 6c 16 03 94 0f 5a |D.. ..0.Q.l....Z|
+  02d0: 46 93 7f 1c 87 8d 25 d7 9d a2 d1 92 4c f3 c2 54 |F.....%.....L..T|
+  02e0: ba f8 70 18 ca 24 0a 29 96 43 71 f2 93 95 74 18 |..p..$.).Cq...t.|
+  02f0: b5 65 c4 b8 f6 6c 5c 34 20 1e d5 0c 21 c0 b1 90 |.e...l\4 ...!...|
+  0300: 9e 12 40 b9 18 fa 5a 00 41 a2 39 d3 a9 c1 73 21 |..@...Z.A.9...s!|
+  0310: 8e 5e 3c b9 b8 f8 48 6a 76 46 a7 1a b6 dd 5b 51 |.^<...HjvF....[Q|
+  0320: 5e 19 1d 59 12 c6 32 89 02 9a c0 8f 4f b8 0a ba |^..Y..2.....O...|
+  0330: 5e ec 58 37 44 a3 2f dd 33 ed c9 d3 dd c7 22 1b |^.X7D./.3.....".|
+  0340: 2f d4 94 8e 95 3f 77 a7 ae 6e f3 32 8d bb 4a 4c |/....?w..n.2..JL|
+  0350: b8 0a 5a 43 34 3a b3 3a d6 77 ff 5c b6 fa ad f9 |..ZC4:.:.w.\....|
+  0360: db fb 6a 33 df c1 7d 99 cf ef d4 d5 6d da 77 7c |..j3..}.....m.w||
+  0370: 3b 19 fd af c5 3f f1 60 c3 17                   |;....?.`..|
+  $ hg debugbundle ../rev.hg2.bz
+  Stream params: {'Compression': 'GZ'}
+  changegroup -- '{}'
+      32af7686d403cf45b5d95f2d70cebea587ac806a
+      9520eea781bcca16c1e15acc0ba14335a0e8e5ba
+      eea13746799a9e0bfd88f29d3c2e9dc9389f524f
+      02de42196ebee42ef284b6780a87cdc96e8eaab6
+  $ hg unbundle ../rev.hg2.bz
+  adding changesets
+  adding manifests
+  adding file changes
+  added 0 changesets with 0 changes to 3 files
+  (run 'hg update' to get a working copy)
+Simple case where it just works: BZ
+-----------------------------------
+
+  $ hg bundle2 --compress BZ --rev '8+7+5+4' ../rev.hg2.bz
+  $ f --hexdump ../rev.hg2.bz
+  ../rev.hg2.bz:
+  0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
+  0010: 69 6f 6e 3d 42 5a 42 5a 68 39 31 41 59 26 53 59 |ion=BZBZh91AY&SY|
+  0020: a3 4b 18 3d 00 00 1a 7f ff ff bf 5f f6 ef ef 7f |.K.=......._....|
+  0030: f6 3f f7 d1 d9 ff ff f7 6e ff ff 6e f7 f6 bd df |.?......n..n....|
+  0040: b5 ab ff cf 67 f6 e7 7b f7 c0 02 d7 33 82 8b 51 |....g..{....3..Q|
+  0050: 04 a5 53 d5 3d 27 a0 99 18 4d 0d 34 00 d1 a1 e8 |..S.='...M.4....|
+  0060: 80 c8 7a 87 a9 a3 43 6a 3d 46 86 26 80 34 3d 40 |..z...Cj=F.&.4=@|
+  0070: c8 c9 b5 34 f4 8f 48 0f 51 ea 34 34 fd 4d aa 19 |...4..H.Q.44.M..|
+  0080: 03 40 0c 08 da 86 43 d4 f5 0f 42 1e a0 f3 54 33 |.@....C...B...T3|
+  0090: 54 d3 13 4d 03 40 32 00 00 32 03 26 80 0d 00 0d |T..M.@2..2.&....|
+  00a0: 00 68 c8 c8 03 20 32 30 98 8c 80 00 00 03 4d 00 |.h... 20......M.|
+  00b0: c8 00 00 0d 00 00 22 99 a1 34 c2 64 a6 d5 34 1a |......"..4.d..4.|
+  00c0: 00 00 06 86 83 4d 07 a8 d1 a0 68 01 a0 00 00 00 |.....M....h.....|
+  00d0: 00 0d 06 80 00 00 00 0d 00 03 40 00 00 04 a4 a1 |..........@.....|
+  00e0: 4d a9 89 89 b4 9a 32 0c 43 46 86 87 a9 8d 41 9a |M.....2.CF....A.|
+  00f0: 98 46 9a 0d 31 32 1a 34 0d 0c 8d a2 0c 98 4d 06 |.F..12.4......M.|
+  0100: 8c 40 c2 60 8d 0d 0c 20 c9 89 fa a0 d0 d3 21 a1 |.@.`... ......!.|
+  0110: ea 34 d3 68 9e a6 d1 74 05 33 cb 66 96 93 28 64 |.4.h...t.3.f..(d|
+  0120: 40 91 22 ac 55 9b ea 40 7b 38 94 e2 f8 06 00 cb |@.".U..@{8......|
+  0130: 28 02 00 4d ab 40 24 10 43 18 cf 64 b4 06 83 0c |(..M.@$.C..d....|
+  0140: 34 6c b4 a3 d4 0a 0a e4 a8 5c 4e 23 c0 c9 7a 31 |4l.......\N#..z1|
+  0150: 97 87 77 7a 64 88 80 8e 60 97 20 93 0f 8e eb c4 |..wzd...`. .....|
+  0160: 62 a4 44 a3 52 20 b2 99 a9 2e e1 d7 29 4a 54 ac |b.D.R ......)JT.|
+  0170: 44 7a bb cc 04 3d e0 aa bd 6a 33 5e 9b a2 57 36 |Dz...=...j3^..W6|
+  0180: fa cb 45 bb 6d 3e c1 d9 d9 f5 83 69 8a d0 e0 e2 |..E.m>.....i....|
+  0190: e7 ae 90 55 24 da 3f ab 78 c0 4c b4 56 a3 9e a4 |...U$.?.x.L.V...|
+  01a0: af 9c 65 74 86 ec 6d dc 62 dc 33 ca c8 50 dd 9d |..et..m.b.3..P..|
+  01b0: 98 8e 9e 59 20 f3 f0 42 91 4a 09 f5 75 8d 3d a5 |...Y ..B.J..u.=.|
+  01c0: a5 15 cb 8d 10 63 b0 c2 2e b2 81 f7 c1 76 0e 53 |.....c.......v.S|
+  01d0: 6c 0e 46 73 b5 ae 67 f9 4c 0b 45 6b a8 32 2a 2f |l.Fs..g.L.Ek.2*/|
+  01e0: a2 54 a4 44 05 20 a1 38 d1 a4 c6 09 a8 2b 08 99 |.T.D. .8.....+..|
+  01f0: a4 14 ae 8d a3 e3 aa 34 27 d8 44 ca c3 5d 21 8b |.......4'.D..]!.|
+  0200: 1a 1e 97 29 71 2b 09 4a 4a 55 55 94 58 65 b2 bc |...)q+.JJUU.Xe..|
+  0210: f3 a5 90 26 36 76 67 7a 51 98 d6 8a 4a 99 50 b5 |...&6vgzQ...J.P.|
+  0220: 99 8f 94 21 17 a9 8b f3 ad 4c 33 d4 2e 40 c8 0c |...!.....L3..@..|
+  0230: 3b 90 53 39 db 48 02 34 83 48 d6 b3 99 13 d2 58 |;.S9.H.4.H.....X|
+  0240: 65 8e 71 ac a9 06 95 f2 c4 8e b4 08 6b d3 0c ae |e.q.........k...|
+  0250: d9 90 56 71 43 a7 a2 62 16 3e 50 63 d3 57 3c 2d |..VqC..b.>Pc.W<-|
+  0260: 9f 0f 34 05 08 d8 a6 4b 59 31 54 66 3a 45 0c 8a |..4....KY1Tf:E..|
+  0270: c7 90 3a f0 6a 83 1b f5 ca fb 80 2b 50 06 fb 51 |..:.j......+P..Q|
+  0280: 7e a6 a4 d4 81 44 82 21 54 00 5b 1a 30 83 62 a3 |~....D.!T.[.0.b.|
+  0290: 18 b6 24 19 1e 45 df 4d 5c db a6 af 5b ac 90 fa |..$..E.M\...[...|
+  02a0: 3e ed f9 ec 4c ba 36 ee d8 60 20 a7 c7 3b cb d1 |>...L.6..` ..;..|
+  02b0: 90 43 7d 27 16 50 5d ad f4 14 07 0b 90 5c cc 6b |.C}'.P]......\.k|
+  02c0: 8d 3f a6 88 f4 34 37 a8 cf 14 63 36 19 f7 3e 28 |.?...47...c6..>(|
+  02d0: de 99 e8 16 a4 9d 0d 40 a1 a7 24 52 14 a6 72 62 |.......@..$R..rb|
+  02e0: 59 5a ca 2d e5 51 90 78 88 d9 c6 c7 21 d0 f7 46 |YZ.-.Q.x....!..F|
+  02f0: b2 04 46 44 4e 20 9c 12 b1 03 4e 25 e0 a9 0c 58 |..FDN ....N%...X|
+  0300: 5b 1d 3c 93 20 01 51 de a9 1c 69 23 32 46 14 b4 |[.<. .Q...i#2F..|
+  0310: 90 db 17 98 98 50 03 90 29 aa 40 b0 13 d8 43 d2 |.....P..).@...C.|
+  0320: 5f c5 9d eb f3 f2 ad 41 e8 7a a9 ed a1 58 84 a6 |_......A.z...X..|
+  0330: 42 bf d6 fc 24 82 c1 20 32 26 4a 15 a6 1d 29 7f |B...$.. 2&J...).|
+  0340: 7e f4 3d 07 bc 62 9a 5b ec 44 3d 72 1d 41 8b 5c |~.=..b.[.D=r.A.\|
+  0350: 80 de 0e 62 9a 2e f8 83 00 d5 07 a0 9c c6 74 98 |...b..........t.|
+  0360: 11 b2 5e a9 38 02 03 ee fd 86 5c f4 86 b3 ae da |..^.8.....\.....|
+  0370: 05 94 01 c5 c6 ea 18 e6 ba 2a ba b3 04 5c 96 89 |.........*...\..|
+  0380: 72 63 5b 10 11 f6 67 34 98 cb e4 c0 4e fa e6 99 |rc[...g4....N...|
+  0390: 19 6e 50 e8 26 8d 0c 17 e0 be ef e1 8e 02 6f 32 |.nP.&.........o2|
+  03a0: 82 dc 26 f8 a1 08 f3 8a 0d f3 c4 75 00 48 73 b8 |..&........u.Hs.|
+  03b0: be 3b 0d 7f d0 fd c7 78 96 ec e0 03 80 68 4d 8d |.;.....x.....hM.|
+  03c0: 43 8c d7 68 58 f9 50 f0 18 cb 21 58 1b 60 cd 1f |C..hX.P...!X.`..|
+  03d0: 84 36 2e 16 1f 0a f7 4e 8f eb df 01 2d c2 79 0b |.6.....N....-.y.|
+  03e0: f7 24 ea 0d e8 59 86 51 6e 1c 30 a3 ad 2f ee 8c |.$...Y.Qn.0../..|
+  03f0: 90 c8 84 d5 e8 34 c1 95 b2 c9 f6 4d 87 1c 7d 19 |.....4.....M..}.|
+  0400: d6 41 58 56 7a e0 6c ba 10 c7 e8 33 39 36 96 e7 |.AXVz.l....396..|
+  0410: d2 f9 59 9a 08 95 48 38 e7 0b b7 0a 24 67 c4 39 |..Y...H8....$g.9|
+  0420: 8b 43 88 57 9c 01 f5 61 b5 e1 27 41 7e af 83 fe |.C.W...a..'A~...|
+  0430: 2e e4 8a 70 a1 21 46 96 30 7a                   |...p.!F.0z|
+  $ hg debugbundle ../rev.hg2.bz
+  Stream params: {'Compression': 'BZ'}
+  changegroup -- '{}'
+      32af7686d403cf45b5d95f2d70cebea587ac806a
+      9520eea781bcca16c1e15acc0ba14335a0e8e5ba
+      eea13746799a9e0bfd88f29d3c2e9dc9389f524f
+      02de42196ebee42ef284b6780a87cdc96e8eaab6
+  $ hg unbundle ../rev.hg2.bz
+  adding changesets
+  adding manifests
+  adding file changes
+  added 0 changesets with 0 changes to 3 files
+  (run 'hg update' to get a working copy)
+
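As the GZ and BZ dumps above show, when a Compression stream parameter is present everything after the parameter block is one compressed stream: zlib data (the 78 9c bytes) for GZ and a bzip2 stream (the BZh9 magic) for BZ. A rough sketch of recovering the plain bundle2 body, assuming only what those two dumps show; it is not how hg itself decodes bundles:

  import bz2
  import struct
  import zlib

  def decompressed_body(path):
      # Rough sketch based on the GZ/BZ dumps above.
      with open(path, 'rb') as fh:
          if fh.read(4) != b'HG20':
              raise ValueError('not a bundle2 stream')
          (plen,) = struct.unpack('>I', fh.read(4))
          params = fh.read(plen).decode('ascii').split(' ')
          body = fh.read()
      if 'Compression=GZ' in params:
          return zlib.decompress(body)
      if 'Compression=BZ' in params:
          return bz2.decompress(body)
      return body                                # stream was not compressed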
+Unknown compression while unbundling
+------------------------------------
+
+  $ hg bundle2 --param Compression=FooBarUnknown --rev '8+7+5+4' ../rev.hg2.bz
+  $ cat ../rev.hg2.bz | hg statbundle2
+  abort: unknown parameters: Stream Parameter - Compression='FooBarUnknown'
+  [255]
+  $ hg unbundle ../rev.hg2.bz
+  abort: ../rev.hg2.bz: unknown bundle feature, Stream Parameter - Compression='FooBarUnknown'
+  (see https://mercurial-scm.org/wiki/BundleFeature for more information)
+  [255]
 
   $ cd ..
--- a/tests/test-bundle2-remote-changegroup.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-bundle2-remote-changegroup.t	Tue Oct 20 15:59:10 2015 -0500
@@ -122,7 +122,7 @@
 
 Test a pull with a remote-changegroup
 
-  $ hg bundle -R repo --base '0:4' -r '5:7' bundle.hg
+  $ hg bundle -R repo --type v1 --base '0:4' -r '5:7' bundle.hg
   3 changesets found
   $ cat > repo/.hg/bundle2maker << EOF
   > remote-changegroup http://localhost:$HGPORT/bundle.hg bundle.hg
@@ -164,7 +164,7 @@
 
 Test a pull with a remote-changegroup and a following changegroup
 
-  $ hg bundle -R repo --base 2 -r '3:4' bundle2.hg
+  $ hg bundle -R repo --type v1 --base 2 -r '3:4' bundle2.hg
   2 changesets found
   $ cat > repo/.hg/bundle2maker << EOF
   > remote-changegroup http://localhost:$HGPORT/bundle2.hg bundle2.hg
@@ -212,7 +212,7 @@
 
 Test a pull with a changegroup followed by a remote-changegroup
 
-  $ hg bundle -R repo --base '0:4' -r '5:7' bundle3.hg
+  $ hg bundle -R repo --type v1 --base '0:4' -r '5:7' bundle3.hg
   3 changesets found
   $ cat > repo/.hg/bundle2maker << EOF
   > changegroup 000000000000 :4
@@ -260,9 +260,9 @@
 
 Test a pull with two remote-changegroups and a changegroup
 
-  $ hg bundle -R repo --base 2 -r '3:4' bundle4.hg
+  $ hg bundle -R repo --type v1 --base 2 -r '3:4' bundle4.hg
   2 changesets found
-  $ hg bundle -R repo --base '3:4' -r '5:6' bundle5.hg
+  $ hg bundle -R repo --type v1 --base '3:4' -r '5:6' bundle5.hg
   2 changesets found
   $ cat > repo/.hg/bundle2maker << EOF
   > remote-changegroup http://localhost:$HGPORT/bundle4.hg bundle4.hg
@@ -316,7 +316,7 @@
 
 Hash digest tests
 
-  $ hg bundle -R repo -a bundle6.hg
+  $ hg bundle -R repo --type v1 -a bundle6.hg
   8 changesets found
 
   $ cat > repo/.hg/bundle2maker << EOF
--- a/tests/test-casecollision-merge.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-casecollision-merge.t	Tue Oct 20 15:59:10 2015 -0500
@@ -191,6 +191,46 @@
 
   $ cd ..
 
+Prepare for tests of directory case-folding collisions
+
+  $ hg init directory-casing
+  $ cd directory-casing
+  $ touch 0 # test: file without directory
+  $ mkdir 0a
+  $ touch 0a/f
+  $ mkdir aA
+  $ touch aA/a
+  $ hg ci -Aqm0
+
+Directory/file case-folding collision:
+
+  $ hg up -q null
+  $ touch 00 # test: starts as '0'
+  $ mkdir 000 # test: starts as '0'
+  $ touch 000/f
+  $ touch Aa # test: collision with 'aA/a'
+  $ hg ci -Aqm1
+
+  $ hg merge 0
+  abort: case-folding collision between Aa and directory of aA/a
+  [255]
+(note: no collision between 0 and 00 or 000/f)
+
+Directory case-folding collision:
+
+  $ hg up -qC null
+  $ hg --config extensions.purge= purge
+  $ mkdir 0A0
+  $ touch 0A0/f # test: starts as '0a'
+  $ mkdir Aa
+  $ touch Aa/b # test: collision with 'aA/a'
+  $ hg ci -Aqm2
+
+  $ hg merge 0
+  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+
+  $ cd ..
 
 ################################
 test for linear updates
--- a/tests/test-churn.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-churn.t	Tue Oct 20 15:59:10 2015 -0500
@@ -171,7 +171,7 @@
   El Ni\xc3\xb1o         1 *************** (esc)
   with space      1 ***************
 
-Test --template argument, with backwards compatiblity
+Test --template argument, with backwards compatibility
 
   $ hg churn -t '{author|user}'
   user1      4 ***************************************************************
--- a/tests/test-clone-r.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-clone-r.t	Tue Oct 20 15:59:10 2015 -0500
@@ -218,3 +218,26 @@
   4 files, 9 changesets, 7 total revisions
   $ cd ..
 
+  $ hg clone test test-9
+  updating to branch default
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd test-9
+  $ hg branch foobar
+  marked working directory as branch foobar
+  (branches are permanent and global, did you want a bookmark?)
+  $ echo file2 >> file2
+  $ hg add file2
+  $ hg commit -m "changeset9"
+  $ echo file3 >> file3
+  $ hg add file3
+  $ hg commit -m "changeset10"
+  $ cd ..
+  $ hg clone -r 9 -u foobar test-9 test-10
+  adding changesets
+  adding manifests
+  adding file changes
+  added 6 changesets with 6 changes to 3 files
+  updating to branch foobar
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+
--- a/tests/test-clone.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-clone.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1020,7 +1020,7 @@
   $ hg -R a id -r 0
   acb14030fe0a
   $ hg id -R remote -r 0
-  abort: there is no Mercurial repository here (.hg not found)
+  abort: repository remote not found!
   [255]
   $ hg --config share.pool=share -q clone -e "python \"$TESTDIR/dummyssh\"" a ssh://user@dummy/remote
   $ hg -R remote id -r 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-clonebundles.t	Tue Oct 20 15:59:10 2015 -0500
@@ -0,0 +1,431 @@
+Set up a server
+
+  $ hg init server
+  $ cd server
+  $ cat >> .hg/hgrc << EOF
+  > [extensions]
+  > clonebundles =
+  > EOF
+
+  $ touch foo
+  $ hg -q commit -A -m 'add foo'
+  $ touch bar
+  $ hg -q commit -A -m 'add bar'
+
+  $ hg serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log
+  $ cat hg.pid >> $DAEMON_PIDS
+  $ cd ..
+
+Feature disabled by default
+(client should not request manifest)
+
+  $ hg clone -U http://localhost:$HGPORT feature-disabled
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+
+  $ cat server/access.log
+  * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
+  * - - [*] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
+  * - - [*] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=aaff8d2ffbbf07a46dd1f05d8ae7877e3f56e2a2&listkeys=phase%2Cbookmarks (glob)
+  * - - [*] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases (glob)
+
+  $ cat >> $HGRCPATH << EOF
+  > [experimental]
+  > clonebundles = true
+  > EOF
+
+Missing manifest should not result in server lookup
+
+  $ hg --verbose clone -U http://localhost:$HGPORT no-manifest
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+
+  $ tail -4 server/access.log
+  * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
+  * - - [*] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
+  * - - [*] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=aaff8d2ffbbf07a46dd1f05d8ae7877e3f56e2a2&listkeys=phase%2Cbookmarks (glob)
+  * - - [*] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases (glob)
+
+Empty manifest file results in retrieval
+(the extension only checks if the manifest file exists)
+
+  $ touch server/.hg/clonebundles.manifest
+  $ hg --verbose clone -U http://localhost:$HGPORT empty-manifest
+  no clone bundles available on remote; falling back to regular clone
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+
+Server advertises presence of feature to client requesting full clone
+
+  $ hg --config experimental.clonebundles=false clone -U http://localhost:$HGPORT advertise-on-clone
+  requesting all changes
+  remote: this server supports the experimental "clone bundles" feature that should enable faster and more reliable cloning
+  remote: help test it by setting the "experimental.clonebundles" config flag to "true"
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+
+Manifest file with invalid URL aborts
+
+  $ echo 'http://does.not.exist/bundle.hg' > server/.hg/clonebundles.manifest
+  $ hg clone http://localhost:$HGPORT 404-url
+  applying clone bundle from http://does.not.exist/bundle.hg
+  error fetching bundle: * not known (glob)
+  abort: error applying bundle
+  (if this error persists, consider contacting the server operator or disable clone bundles via "--config experimental.clonebundles=false")
+  [255]
+
+Clone aborts when the server is not running
+
+  $ echo "http://localhost:$HGPORT1/bundle.hg" > server/.hg/clonebundles.manifest
+  $ hg clone http://localhost:$HGPORT server-not-runner
+  applying clone bundle from http://localhost:$HGPORT1/bundle.hg
+  error fetching bundle: Connection refused
+  abort: error applying bundle
+  (if this error persists, consider contacting the server operator or disable clone bundles via "--config experimental.clonebundles=false")
+  [255]
+
+Server returns 404
+
+  $ python $TESTDIR/dumbhttp.py -p $HGPORT1 --pid http.pid
+  $ cat http.pid >> $DAEMON_PIDS
+  $ hg clone http://localhost:$HGPORT running-404
+  applying clone bundle from http://localhost:$HGPORT1/bundle.hg
+  HTTP error fetching bundle: HTTP Error 404: File not found
+  abort: error applying bundle
+  (if this error persists, consider contacting the server operator or disable clone bundles via "--config experimental.clonebundles=false")
+  [255]
+
+We can override the failure and fall back to a regular clone
+
+  $ hg --config ui.clonebundlefallback=true clone -U http://localhost:$HGPORT 404-fallback
+  applying clone bundle from http://localhost:$HGPORT1/bundle.hg
+  HTTP error fetching bundle: HTTP Error 404: File not found
+  falling back to normal clone
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+
+Bundle with partial content works
+
+  $ hg -R server bundle --type gzip-v1 --base null -r 53245c60e682 partial.hg
+  1 changesets found
+
+We verify exact bundle content as an extra check against accidental future
+changes. If this output changes, we could break old clients.
+
+  $ f --size --hexdump partial.hg
+  partial.hg: size=208
+  0000: 48 47 31 30 47 5a 78 9c 63 60 60 98 17 ac 12 93 |HG10GZx.c``.....|
+  0010: f0 ac a9 23 45 70 cb bf 0d 5f 59 4e 4a 7f 79 21 |...#Ep..._YNJ.y!|
+  0020: 9b cc 40 24 20 a0 d7 ce 2c d1 38 25 cd 24 25 d5 |..@$ ...,.8%.$%.|
+  0030: d8 c2 22 cd 38 d9 24 cd 22 d5 c8 22 cd 24 cd 32 |..".8.$."..".$.2|
+  0040: d1 c2 d0 c4 c8 d2 32 d1 38 39 29 c9 34 cd d4 80 |......2.89).4...|
+  0050: ab 24 b5 b8 84 cb 40 c1 80 2b 2d 3f 9f 8b 2b 31 |.$....@..+-?..+1|
+  0060: 25 45 01 c8 80 9a d2 9b 65 fb e5 9e 45 bf 8d 7f |%E......e...E...|
+  0070: 9f c6 97 9f 2b 44 34 67 d9 ec 8e 0f a0 92 0b 75 |....+D4g.......u|
+  0080: 41 d6 24 59 18 a4 a4 9a a6 18 1a 5b 98 9b 5a 98 |A.$Y.......[..Z.|
+  0090: 9a 18 26 9b a6 19 98 1a 99 99 26 a6 18 9a 98 24 |..&.......&....$|
+  00a0: 26 59 a6 25 5a 98 a5 18 a6 24 71 41 35 b1 43 dc |&Y.%Z....$qA5.C.|
+  00b0: 96 b0 83 f7 e9 45 8b d2 56 c7 a3 1f 82 52 d7 8a |.....E..V....R..|
+  00c0: 78 ed fc d5 76 f1 36 95 dc 05 07 00 ad 39 5e d3 |x...v.6......9^.|
+
+  $ echo "http://localhost:$HGPORT1/partial.hg" > server/.hg/clonebundles.manifest
+  $ hg clone -U http://localhost:$HGPORT partial-bundle
+  applying clone bundle from http://localhost:$HGPORT1/partial.hg
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files
+  finished applying clone bundle
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files
+
+Bundle with full content works
+
+  $ hg -R server bundle --type gzip-v2 --base null -r tip full.hg
+  2 changesets found
+
+Again, we perform an extra check against bundle content changes. If this content
+changes, clone bundles produced by new Mercurial versions may not be readable
+by old clients.
+
+  $ f --size --hexdump full.hg
+  full.hg: size=408
+  0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
+  0010: 69 6f 6e 3d 47 5a 78 9c 63 60 60 90 e5 76 f6 70 |ion=GZx.c``..v.p|
+  0020: f4 73 77 75 0f f2 0f 0d 60 00 02 46 06 76 a6 b2 |.swu....`..F.v..|
+  0030: d4 a2 e2 cc fc 3c 03 23 06 06 e6 7d 40 b1 4d c1 |.....<.#...}@.M.|
+  0040: 2a 31 09 cf 9a 3a 52 04 b7 fc db f0 95 e5 a4 f4 |*1...:R.........|
+  0050: 97 17 b2 c9 0c 14 00 02 e6 d9 99 25 1a a7 a4 99 |...........%....|
+  0060: a4 a4 1a 5b 58 a4 19 27 9b a4 59 a4 1a 59 a4 99 |...[X..'..Y..Y..|
+  0070: a4 59 26 5a 18 9a 18 59 5a 26 1a 27 27 25 99 a6 |.Y&Z...YZ&.''%..|
+  0080: 99 1a 70 95 a4 16 97 70 19 28 18 70 a5 e5 e7 73 |..p....p.(.p...s|
+  0090: 71 25 a6 a4 28 00 19 40 13 0e ac fa df ab ff 7b |q%..(..@.......{|
+  00a0: 3f fb 92 dc 8b 1f 62 bb 9e b7 d7 d9 87 3d 5a 44 |?.....b......=ZD|
+  00b0: ac 2f b0 a9 c3 66 1e 54 b9 26 08 a7 1a 1b 1a a7 |./...f.T.&......|
+  00c0: 25 1b 9a 1b 99 19 9a 5a 18 9b a6 18 19 00 dd 67 |%......Z.......g|
+  00d0: 61 61 98 06 f4 80 49 4a 8a 65 52 92 41 9a 81 81 |aa....IJ.eR.A...|
+  00e0: a5 11 17 50 31 30 58 19 cc 80 98 25 29 b1 08 c4 |...P10X....%)...|
+  00f0: 37 07 79 19 88 d9 41 ee 07 8a 41 cd 5d 98 65 fb |7.y...A...A.].e.|
+  0100: e5 9e 45 bf 8d 7f 9f c6 97 9f 2b 44 34 67 d9 ec |..E.......+D4g..|
+  0110: 8e 0f a0 61 a8 eb 82 82 2e c9 c2 20 25 d5 34 c5 |...a....... %.4.|
+  0120: d0 d8 c2 dc d4 c2 d4 c4 30 d9 34 cd c0 d4 c8 cc |........0.4.....|
+  0130: 34 31 c5 d0 c4 24 31 c9 32 2d d1 c2 2c c5 30 25 |41...$1.2-..,.0%|
+  0140: 09 e4 ee 85 8f 85 ff 88 ab 89 36 c7 2a c4 47 34 |..........6.*.G4|
+  0150: fe f8 ec 7b 73 37 3f c3 24 62 1d 8d 4d 1d 9e 40 |...{s7?.$b..M..@|
+  0160: 06 3b 10 14 36 a4 38 10 04 d8 21 01 5a b2 83 f7 |.;..6.8...!.Z...|
+  0170: e9 45 8b d2 56 c7 a3 1f 82 52 d7 8a 78 ed fc d5 |.E..V....R..x...|
+  0180: 76 f1 36 25 81 49 c0 ad 30 c0 0e 49 8f 54 b7 9e |v.6%.I..0..I.T..|
+  0190: d4 1c 09 00 bb 8d f0 bd                         |........|
+
+  $ echo "http://localhost:$HGPORT1/full.hg" > server/.hg/clonebundles.manifest
+  $ hg clone -U http://localhost:$HGPORT full-bundle
+  applying clone bundle from http://localhost:$HGPORT1/full.hg
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+  finished applying clone bundle
+  searching for changes
+  no changes found
+
+Entry with unknown BUNDLESPEC is filtered and not used
+
+  $ cat > server/.hg/clonebundles.manifest << EOF
+  > http://bad.entry1 BUNDLESPEC=UNKNOWN
+  > http://bad.entry2 BUNDLESPEC=xz-v1
+  > http://bad.entry3 BUNDLESPEC=none-v100
+  > http://localhost:$HGPORT1/full.hg BUNDLESPEC=gzip-v2
+  > EOF
+
+  $ hg clone -U http://localhost:$HGPORT filter-unknown-type
+  applying clone bundle from http://localhost:$HGPORT1/full.hg
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+  finished applying clone bundle
+  searching for changes
+  no changes found
+
+Automatic fallback when all entries are filtered
+
+  $ cat > server/.hg/clonebundles.manifest << EOF
+  > http://bad.entry BUNDLESPEC=UNKNOWN
+  > EOF
+
+  $ hg clone -U http://localhost:$HGPORT filter-all
+  no compatible clone bundles available on server; falling back to regular clone
+  (you may want to report this to the server operator)
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+
+URLs requiring SNI are filtered in Python <2.7.9
+
+  $ cp full.hg sni.hg
+  $ cat > server/.hg/clonebundles.manifest << EOF
+  > http://localhost:$HGPORT1/sni.hg REQUIRESNI=true
+  > http://localhost:$HGPORT1/full.hg
+  > EOF
+
+#if sslcontext
+Python 2.7.9+ supports SNI
+
+  $ hg clone -U http://localhost:$HGPORT sni-supported
+  applying clone bundle from http://localhost:$HGPORT1/sni.hg
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+  finished applying clone bundle
+  searching for changes
+  no changes found
+#else
+Python <2.7.9 will filter SNI URLs
+
+  $ hg clone -U http://localhost:$HGPORT sni-unsupported
+  applying clone bundle from http://localhost:$HGPORT1/full.hg
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+  finished applying clone bundle
+  searching for changes
+  no changes found
+#endif
+
+Stream clone bundles are supported
+
+  $ hg -R server debugcreatestreamclonebundle packed.hg
+  writing 613 bytes for 4 files
+  bundle requirements: revlogv1
+
+Entry with no bundle spec should work
+
+  $ cat > server/.hg/clonebundles.manifest << EOF
+  > http://localhost:$HGPORT1/packed.hg
+  > EOF
+
+  $ hg clone -U http://localhost:$HGPORT stream-clone-no-spec
+  applying clone bundle from http://localhost:$HGPORT1/packed.hg
+  4 files to transfer, 613 bytes of data
+  transferred 613 bytes in *.* seconds (*) (glob)
+  finished applying clone bundle
+  searching for changes
+  no changes found
+
+Bundle spec without parameters should work
+
+  $ cat > server/.hg/clonebundles.manifest << EOF
+  > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1
+  > EOF
+
+  $ hg clone -U http://localhost:$HGPORT stream-clone-vanilla-spec
+  applying clone bundle from http://localhost:$HGPORT1/packed.hg
+  4 files to transfer, 613 bytes of data
+  transferred 613 bytes in *.* seconds (*) (glob)
+  finished applying clone bundle
+  searching for changes
+  no changes found
+
+Bundle spec with format requirements should work
+
+  $ cat > server/.hg/clonebundles.manifest << EOF
+  > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv1
+  > EOF
+
+  $ hg clone -U http://localhost:$HGPORT stream-clone-supported-requirements
+  applying clone bundle from http://localhost:$HGPORT1/packed.hg
+  4 files to transfer, 613 bytes of data
+  transferred 613 bytes in *.* seconds (*) (glob)
+  finished applying clone bundle
+  searching for changes
+  no changes found
+
+Stream bundle spec with unknown requirements should be filtered out
+
+  $ cat > server/.hg/clonebundles.manifest << EOF
+  > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv42
+  > EOF
+
+  $ hg clone -U http://localhost:$HGPORT stream-clone-unsupported-requirements
+  no compatible clone bundles available on server; falling back to regular clone
+  (you may want to report this to the server operator)
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+
+Set up manifest for testing preferences
+(Remember, the TYPE does not have to match reality - the URL is
+important)
+
+  $ cp full.hg gz-a.hg
+  $ cp full.hg gz-b.hg
+  $ cp full.hg bz2-a.hg
+  $ cp full.hg bz2-b.hg
+  $ cat > server/.hg/clonebundles.manifest << EOF
+  > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2 extra=a
+  > http://localhost:$HGPORT1/bz2-a.hg BUNDLESPEC=bzip2-v2 extra=a
+  > http://localhost:$HGPORT1/gz-b.hg BUNDLESPEC=gzip-v2 extra=b
+  > http://localhost:$HGPORT1/bz2-b.hg BUNDLESPEC=bzip2-v2 extra=b
+  > EOF
+
+Preferring an undefined attribute will take first entry
+
+  $ hg --config experimental.clonebundleprefers=foo=bar clone -U http://localhost:$HGPORT prefer-foo
+  applying clone bundle from http://localhost:$HGPORT1/gz-a.hg
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+  finished applying clone bundle
+  searching for changes
+  no changes found
+
+Preferring bz2 type will download first entry of that type
+
+  $ hg --config experimental.clonebundleprefers=COMPRESSION=bzip2 clone -U http://localhost:$HGPORT prefer-bz
+  applying clone bundle from http://localhost:$HGPORT1/bz2-a.hg
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+  finished applying clone bundle
+  searching for changes
+  no changes found
+
+Preferring multiple values of an option works
+
+  $ hg --config experimental.clonebundleprefers=COMPRESSION=unknown,COMPRESSION=bzip2 clone -U http://localhost:$HGPORT prefer-multiple-bz
+  applying clone bundle from http://localhost:$HGPORT1/bz2-a.hg
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+  finished applying clone bundle
+  searching for changes
+  no changes found
+
+Sorting multiple values should get us back to original first entry
+
+  $ hg --config experimental.clonebundleprefers=BUNDLESPEC=unknown,BUNDLESPEC=gzip-v2,BUNDLESPEC=bzip2-v2 clone -U http://localhost:$HGPORT prefer-multiple-gz
+  applying clone bundle from http://localhost:$HGPORT1/gz-a.hg
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+  finished applying clone bundle
+  searching for changes
+  no changes found
+
+Preferring multiple attributes has correct order
+
+  $ hg --config experimental.clonebundleprefers=extra=b,BUNDLESPEC=bzip2-v2 clone -U http://localhost:$HGPORT prefer-separate-attributes
+  applying clone bundle from http://localhost:$HGPORT1/bz2-b.hg
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+  finished applying clone bundle
+  searching for changes
+  no changes found
+
+Test where attribute is missing from some entries
+
+  $ cat > server/.hg/clonebundles.manifest << EOF
+  > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
+  > http://localhost:$HGPORT1/bz2-a.hg BUNDLESPEC=bzip2-v2
+  > http://localhost:$HGPORT1/gz-b.hg BUNDLESPEC=gzip-v2 extra=b
+  > http://localhost:$HGPORT1/bz2-b.hg BUNDLESPEC=bzip2-v2 extra=b
+  > EOF
+
+  $ hg --config experimental.clonebundleprefers=extra=b clone -U http://localhost:$HGPORT prefer-partially-defined-attribute
+  applying clone bundle from http://localhost:$HGPORT1/gz-b.hg
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+  finished applying clone bundle
+  searching for changes
+  no changes found
--- a/tests/test-command-template.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-command-template.t	Tue Oct 20 15:59:10 2015 -0500
@@ -343,6 +343,11 @@
 
 Test xml styles:
 
+  $ hg log --style xml -r 'not all()'
+  <?xml version="1.0"?>
+  <log>
+  </log>
+
   $ hg log --style xml
   <?xml version="1.0"?>
   <log>
@@ -2495,10 +2500,14 @@
   abort: template filter 'escape' is not compatible with keyword 'date'
   [255]
 
+  $ hg log -l 3 --template 'line: {extras|localdate}\n'
+  hg: parse error: localdate expects a date information
+  [255]
+
 Behind the scenes, this will throw ValueError
 
   $ hg tip --template '{author|email|date}\n'
-  abort: template filter 'datefilter' is not compatible with keyword 'author'
+  hg: parse error: date expects a date information
   [255]
 
 Error in nested template:
@@ -2681,6 +2690,32 @@
   1: t1+0
   0: null+1
 
+  $ hg log --template "{rev}: {latesttag % '{tag}+{distance},{changes} '}\n"
+  10: t5+5,5 
+  9: t5+4,4 
+  8: t5+3,3 
+  7: t5+2,2 
+  6: t5+1,1 
+  5: t5+0,0 
+  4: at3+1,1 t3+1,1 
+  3: at3+0,0 t3+0,0 
+  2: t2+0,0 
+  1: t1+0,0 
+  0: null+1,1 
+
+  $ hg log --template "{rev}: {latesttag('re:^t[13]$') % '{tag}, C: {changes}, D: {distance}'}\n"
+  10: t3, C: 8, D: 7
+  9: t3, C: 7, D: 6
+  8: t3, C: 6, D: 5
+  7: t3, C: 5, D: 4
+  6: t3, C: 4, D: 3
+  5: t3, C: 3, D: 2
+  4: t3, C: 1, D: 1
+  3: t3, C: 0, D: 0
+  2: t1, C: 1, D: 1
+  1: t1, C: 0, D: 0
+  0: null, C: 1, D: 1
+
   $ cd ..
 
 
@@ -2727,6 +2762,13 @@
   $ hg log -R latesttag -r 10 --template '{sub("[0-9]", "x", "{rev}")}\n'
   xx
 
+  $ hg log -R latesttag -r 10 -T '{sub("[", "x", rev)}\n'
+  hg: parse error: sub got an invalid pattern: [
+  [255]
+  $ hg log -R latesttag -r 10 -T '{sub("[0-9]", r"\1", rev)}\n'
+  hg: parse error: sub got an invalid replacement: \1
+  [255]
+
 Test the strip function with chars specified:
 
   $ hg log -R latesttag --template '{desc}\n'
@@ -2925,10 +2967,10 @@
   hg: parse error at 21: unterminated string
   [255]
   $ hg log -r 2 -T '{if(rev, \"\\"")}\n'
-  hg: parse error at 11: syntax error
+  hg: parse error: trailing \ in string
   [255]
   $ hg log -r 2 -T '{if(rev, r\"\\"")}\n'
-  hg: parse error at 12: syntax error
+  hg: parse error: trailing \ in string
   [255]
 
   $ cd ..
@@ -3105,6 +3147,25 @@
   hg: parse error: get() expects a dict as first argument
   [255]
 
+Test localdate(date, tz) function:
+
+  $ TZ=JST-09 hg log -r0 -T '{date|localdate|isodate}\n'
+  1970-01-01 09:00 +0900
+  $ TZ=JST-09 hg log -r0 -T '{localdate(date, "UTC")|isodate}\n'
+  1970-01-01 00:00 +0000
+  $ TZ=JST-09 hg log -r0 -T '{localdate(date, "+0200")|isodate}\n'
+  1970-01-01 02:00 +0200
+  $ TZ=JST-09 hg log -r0 -T '{localdate(date, "0")|isodate}\n'
+  1970-01-01 00:00 +0000
+  $ TZ=JST-09 hg log -r0 -T '{localdate(date, 0)|isodate}\n'
+  1970-01-01 00:00 +0000
+  $ hg log -r0 -T '{localdate(date, "invalid")|isodate}\n'
+  hg: parse error: localdate expects a timezone
+  [255]
+  $ hg log -r0 -T '{localdate(date, date)|isodate}\n'
+  hg: parse error: localdate expects a timezone
+  [255]
+
 Test shortest(node) function:
 
   $ echo b > b
@@ -3117,6 +3178,8 @@
   e777603221
   bcc7ff960b
   f7769ec2ab
+  $ hg log --template '{node|shortest}\n' -l1
+  e777
 
 Test pad function
 
@@ -3197,6 +3260,23 @@
   $ hg log --template '{revset("TIP"|lower)}\n' -l1
   2
 
+ a list template is evaluated for each item of revset
+
+  $ hg log -T '{rev} p: {revset("p1(%s)", rev) % "{rev}:{node|short}"}\n'
+  2 p: 1:bcc7ff960b8e
+  1 p: 0:f7769ec2ab97
+  0 p: 
+
+ therefore, 'revcache' should be recreated for each rev
+
+  $ hg log -T '{rev} {file_adds}\np {revset("p1(%s)", rev) % "{file_adds}"}\n'
+  2 aa b
+  p 
+  1 
+  p a
+  0 a
+  p 
+
 Test active bookmark templating
 
   $ hg book foo
@@ -3390,3 +3470,12 @@
   $ hg log -T "{indent(date, '   ')}\n" -r 2:3 -R a
      1200000.00
      1300000.00
+
+Test broken string escapes:
+
+  $ hg log -T "bogus\\" -R a
+  hg: parse error: trailing \ in string
+  [255]
+  $ hg log -T "\\xy" -R a
+  hg: parse error: invalid \x escape
+  [255]
--- a/tests/test-commandserver.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-commandserver.t	Tue Oct 20 15:59:10 2015 -0500
@@ -322,7 +322,6 @@
   ...     runcommand(server, ['phase', '-r', '.'])
   *** runcommand phase -r . -p
   no phases changed
-   [1]
   *** runcommand commit -Am.
   *** runcommand rollback
   repository tip rolled back to revision 3 (undo commit)
@@ -379,7 +378,10 @@
   ...     runcommand(server, ['log', '-qr', 'draft()'])
   ...     # create draft commits by another process
   ...     for i in xrange(5, 7):
-  ...         os.system('echo a >> a')
+  ...         f = open('a', 'ab')
+  ...         f.seek(0, os.SEEK_END)
+  ...         f.write('a\n')
+  ...         f.close()
   ...         os.system('hg commit -Aqm%d' % i)
   ...     # new commits should be listed as draft revisions
   ...     runcommand(server, ['log', '-qr', 'draft()'])
@@ -458,6 +460,30 @@
   *** runcommand branches
   default                        1:731265503d86
 
+in-memory cache must be reloaded if the transaction is aborted; otherwise the
+changelog and manifest would have an invalid node:
+
+  $ echo a >> a
+  >>> from hgclient import readchannel, runcommand, check
+  >>> @check
+  ... def txabort(server):
+  ...     readchannel(server)
+  ...     runcommand(server, ['commit', '--config', 'hooks.pretxncommit=false',
+  ...                         '-mfoo'])
+  ...     runcommand(server, ['verify'])
+  *** runcommand commit --config hooks.pretxncommit=false -mfoo
+  transaction abort!
+  rollback completed
+  abort: pretxncommit hook exited with status 1
+   [255]
+  *** runcommand verify
+  checking changesets
+  checking manifests
+  crosschecking files in changesets and manifests
+  checking files
+  1 files, 2 changesets, 2 total revisions
+  $ hg revert --no-backup -aq
+
   $ cat >> .hg/hgrc << EOF
   > [experimental]
   > evolution=createmarkers
@@ -632,6 +658,15 @@
   000000000000 tip
 
 
+don't fall back to cwd if invalid -R path is specified (issue4805):
+
+  $ cd repo
+  $ hg serve --cmdserver pipe -R ../nonexistent
+  abort: repository ../nonexistent not found!
+  [255]
+  $ cd ..
+
+
 unix domain socket:
 
   $ cd repo
--- a/tests/test-commit-amend.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-commit-amend.t	Tue Oct 20 15:59:10 2015 -0500
@@ -19,7 +19,7 @@
 
 Nothing to amend:
 
-  $ hg ci --amend
+  $ hg ci --amend -m 'base1'
   nothing changed
   [1]
 
@@ -627,8 +627,7 @@
   $ hg add cc
   $ hg ci -m aa
   $ hg merge -q bar
-  warning: conflicts during merge.
-  merging cc incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging cc! (edit, then use 'hg resolve --mark')
   [1]
   $ hg resolve -m cc
   (no more unresolved files)
--- a/tests/test-commit-interactive-curses.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-commit-interactive-curses.t	Tue Oct 20 15:59:10 2015 -0500
@@ -71,7 +71,7 @@
 - unfold it
 - go down to second hunk (1 for the first hunk, 1 for the first hunkline, 1 for the second hunk, 1 for the second hunklike)
 - toggle the second hunk
-- edit the hunk and quit the editor imediately with non-zero status
+- edit the hunk and quit the editor immediately with non-zero status
 - commit
 
   $ printf "printf 'editor ran\n'; exit 1" > editor.sh
--- a/tests/test-commit-unresolved.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-commit-unresolved.t	Tue Oct 20 15:59:10 2015 -0500
@@ -25,8 +25,7 @@
 
   $ hg merge
   merging A
-  warning: conflicts during merge.
-  merging A incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
   1 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
--- a/tests/test-commit.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-commit.t	Tue Oct 20 15:59:10 2015 -0500
@@ -117,7 +117,7 @@
   $ echo fake >> .hg/requires
   $ hg commit -m bla
   abort: repository requires features unknown to this Mercurial: fake!
-  (see http://mercurial.selenic.com/wiki/MissingRequirement for more information)
+  (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
 
   $ cd ..
@@ -650,3 +650,30 @@
   $ hg co --clean tip
   abort: path contains illegal component: HG8B6C~2/hgrc (glob)
   [255]
+
+# test that an unmodified commit template message aborts
+
+  $ hg init unmodified_commit_template
+  $ cd unmodified_commit_template
+  $ echo foo > foo
+  $ hg add foo
+  $ hg commit -m "foo"
+  $ cat >> .hg/hgrc <<EOF
+  > [committemplate]
+  > changeset.commit = HI THIS IS NOT STRIPPED
+  >     HG: this is customized commit template
+  >     HG: {extramsg}
+  >     {if(activebookmark,
+  >    "HG: bookmark '{activebookmark}' is activated\n",
+  >    "HG: no bookmark is activated\n")}{subrepos %
+  >    "HG: subrepo '{subrepo}' is changed\n"}
+  > EOF
+  $ cat > $TESTTMP/notouching.sh <<EOF
+  > true
+  > EOF
+  $ echo foo2 > foo2
+  $ hg add foo2
+  $ HGEDITOR="sh $TESTTMP/notouching.sh" hg commit
+  abort: commit message unchanged
+  [255]
+  $ cd ..
--- a/tests/test-completion.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-completion.t	Tue Oct 20 15:59:10 2015 -0500
@@ -69,17 +69,20 @@
 Show debug commands if there are no other candidates
   $ hg debugcomplete debug
   debugancestor
+  debugapplystreamclonebundle
   debugbuilddag
   debugbundle
   debugcheckstate
   debugcommands
   debugcomplete
   debugconfig
+  debugcreatestreamclonebundle
   debugdag
   debugdata
   debugdate
   debugdirstate
   debugdiscovery
+  debugextensions
   debugfileset
   debugfsinfo
   debuggetbundle
@@ -90,6 +93,7 @@
   debugknown
   debuglabelcomplete
   debuglocks
+  debugmergestate
   debugnamecomplete
   debugobsolete
   debugpathcomplete
@@ -229,16 +233,19 @@
   config: untrusted, edit, local, global
   copy: after, force, include, exclude, dry-run
   debugancestor: 
+  debugapplystreamclonebundle: 
   debugbuilddag: mergeable-file, overwritten-file, new-file
   debugbundle: all
   debugcheckstate: 
   debugcommands: 
   debugcomplete: options
+  debugcreatestreamclonebundle: 
   debugdag: tags, branches, dots, spaces
   debugdata: changelog, manifest, dir
   debugdate: extended
   debugdirstate: nodates, datesort
   debugdiscovery: old, nonheads, ssh, remotecmd, insecure
+  debugextensions: template
   debugfileset: rev
   debugfsinfo: 
   debuggetbundle: head, common, type
@@ -249,12 +256,13 @@
   debugknown: 
   debuglabelcomplete: 
   debuglocks: force-lock, force-wlock
+  debugmergestate: 
   debugnamecomplete: 
   debugobsolete: flags, record-parents, rev, date, user
   debugpathcomplete: full, normal, added, removed
   debugpushkey: 
   debugpvec: 
-  debugrebuilddirstate: rev
+  debugrebuilddirstate: rev, minimal
   debugrebuildfncache: 
   debugrename: rev
   debugrevlog: changelog, manifest, dir, dump
--- a/tests/test-conflict.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-conflict.t	Tue Oct 20 15:59:10 2015 -0500
@@ -36,8 +36,7 @@
 
   $ hg merge 1
   merging a
-  warning: conflicts during merge.
-  merging a incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -73,8 +72,7 @@
 
   $ hg merge 1
   merging a
-  warning: conflicts during merge.
-  merging a incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -105,8 +103,7 @@
   > EOF
 
   $ hg -q merge 1
-  warning: conflicts during merge.
-  merging a incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   [1]
 
   $ cat a
@@ -148,8 +145,7 @@
   > EOF
 
   $ hg -q --encoding utf-8 merge 1
-  warning: conflicts during merge.
-  merging a incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   [1]
 
   $ cat a
@@ -176,8 +172,7 @@
 
   $ hg merge 1
   merging a
-  warning: conflicts during merge.
-  merging a incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -205,8 +200,7 @@
 
   $ hg merge 1 --tool internal:merge3
   merging a
-  warning: conflicts during merge.
-  merging a incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -232,3 +226,64 @@
   5
   >>>>>>> other
   Hop we are done.
+
+Add some non-conflicting changes on each head, to make sure we really
+are merging, unlike :local and :other
+
+  $ hg up -C
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ printf "\n\nEnd of file\n" >> a
+  $ hg ci -m "Add some stuff at the end"
+  $ hg up -r 1
+  1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ printf "Start of file\n\n\n" > tmp
+  $ cat a >> tmp
+  $ mv tmp a
+  $ hg ci -m "Add some stuff at the beginning"
+
+Now test :merge-other and :merge-local
+
+  $ hg merge
+  merging a
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+  1 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+  [1]
+  $ hg resolve --tool :merge-other a
+  merging a
+  (no more unresolved files)
+  $ cat a
+  Start of file
+  
+  
+  Small Mathematical Series.
+  1
+  2
+  3
+  6
+  8
+  Hop we are done.
+  
+  
+  End of file
+
+  $ hg up -C
+  1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ hg merge --tool :merge-local
+  merging a
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+  $ cat a
+  Start of file
+  
+  
+  Small Mathematical Series.
+  1
+  2
+  3
+  4
+  5
+  Hop we are done.
+  
+  
+  End of file
--- a/tests/test-contrib.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-contrib.t	Tue Oct 20 15:59:10 2015 -0500
@@ -153,7 +153,6 @@
   other
   end
   >>>>>>> conflict-other
-  warning: conflicts during merge.
   [1]
 
 1 label
@@ -167,7 +166,6 @@
   other
   end
   >>>>>>> conflict-other
-  warning: conflicts during merge.
   [1]
 
 2 labels
@@ -181,7 +179,6 @@
   other
   end
   >>>>>>> bar
-  warning: conflicts during merge.
   [1]
 
 3 labels
@@ -196,7 +193,6 @@
   other
   end
   >>>>>>> bar
-  warning: conflicts during merge.
   [1]
 
 too many labels
--- a/tests/test-convert-bzr-114.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-convert-bzr-114.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,4 +1,4 @@
-#require bzr114
+#require bzr bzr114
 
   $ . "$TESTDIR/bzr-definitions"
 
--- a/tests/test-convert-bzr-directories.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-convert-bzr-directories.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,3 +1,4 @@
+#require bzr
 
   $ . "$TESTDIR/bzr-definitions"
 
--- a/tests/test-convert-bzr-ghosts.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-convert-bzr-ghosts.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,3 +1,4 @@
+#require bzr
 
   $ . "$TESTDIR/bzr-definitions"
   $ cat > ghostcreator.py <<EOF
--- a/tests/test-convert-bzr-merges.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-convert-bzr-merges.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,3 +1,5 @@
+#require bzr
+
 N.B. bzr 1.13 has a bug that breaks this test.  If you see this
 test fail, check your bzr version.  Upgrading to bzr 1.13.1
 should fix it.
--- a/tests/test-convert-bzr-treeroot.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-convert-bzr-treeroot.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,3 +1,4 @@
+#require bzr
 
   $ . "$TESTDIR/bzr-definitions"
   $ cat > treeset.py <<EOF
--- a/tests/test-convert-bzr.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-convert-bzr.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,3 +1,5 @@
+#require bzr
+
   $ . "$TESTDIR/bzr-definitions"
 
 create and rename on the same file in the same step
--- a/tests/test-convert-cvs-branch.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-convert-cvs-branch.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,7 +1,7 @@
 #require cvs
 
-This is http://mercurial.selenic.com/bts/issue1148
-and http://mercurial.selenic.com/bts/issue1447
+This is https://bz.mercurial-scm.org/1148
+and https://bz.mercurial-scm.org/1447
 
   $ cvscall()
   > {
--- a/tests/test-convert-cvs.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-convert-cvs.t	Tue Oct 20 15:59:10 2015 -0500
@@ -223,6 +223,7 @@
   T a
   T b/c
   $ cvs -q update -r branch > /dev/null
+  $ sleep 1
   $ echo d >> b/c
   $ cvs -q commit -mci2 . | grep '<--'
   $TESTTMP/cvsrepo/src/b/c,v  <--  *c (glob)
@@ -333,13 +334,29 @@
 testing debugcvsps
 
   $ cd src
-  $ hg debugcvsps --fuzz=2
+  $ hg debugcvsps --fuzz=2 -x >/dev/null
+
+commit a new revision changing a and removing b/c
+
+  $ cvscall -q update -A
+  U a
+  U b/c
+  $ sleep 1
+  $ echo h >> a
+  $ cvscall -Q remove -f b/c
+  $ cvscall -q commit -mci | grep '<--'
+  $TESTTMP/cvsrepo/src/a,v  <--  a
+  $TESTTMP/cvsrepo/src/b/c,v  <--  *c (glob)
+
+update and verify the cvsps cache
+
+  $ hg debugcvsps --fuzz=2 -u
   collecting CVS rlog
-  11 log entries
-  cvslog hook: 11 entries
+  13 log entries
+  cvslog hook: 13 entries
   creating changesets
-  10 changeset entries
-  cvschangesets hook: 10 changesets
+  11 changeset entries
+  cvschangesets hook: 11 changesets
   ---------------------
   PatchSet 1 
   Date: * (glob)
@@ -466,5 +483,18 @@
   Members: 
   	b/c:1.1.2.1->1.1.2.2 
   
+  ---------------------
+  PatchSet 11 
+  Date: * (glob)
+  Author: * (glob)
+  Branch: HEAD
+  Tag: (none) 
+  Log:
+  ci
+  
+  Members: 
+  	a:1.2->1.3 
+  	b/c:1.3->1.4(DEAD) 
+  
 
   $ cd ..
--- a/tests/test-convert-filemap.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-convert-filemap.t	Tue Oct 20 15:59:10 2015 -0500
@@ -671,3 +671,73 @@
   |/
   o  0:c334dc3be0da@default "add" files: a
   
+  $ cd ..
+
+test converting merges into a repo that contains other files
+
+  $ hg init merge-test1
+  $ cd merge-test1
+  $ touch a && hg commit -Aqm 'add a'
+  $ echo a > a && hg commit -Aqm 'edit a'
+  $ hg up -q 0
+  $ touch b && hg commit -Aqm 'add b'
+  $ hg merge -q 1 && hg commit -qm 'merge a & b'
+
+  $ cd ..
+  $ hg init merge-test2
+  $ cd merge-test2
+  $ mkdir converted
+  $ touch converted/a toberemoved && hg commit -Aqm 'add converted/a & toberemoved'
+  $ touch x && rm toberemoved && hg commit -Aqm 'add x & remove tobremoved'
+  $ cd ..
+  $ hg log -G -T '{shortest(node)} {desc}' -R merge-test1
+  @    1191 merge a & b
+  |\
+  | o  9077 add b
+  | |
+  o |  d19f edit a
+  |/
+  o  ac82 add a
+  
+  $ hg log -G -T '{shortest(node)} {desc}' -R merge-test2
+  @  150e add x & remove tobremoved
+  |
+  o  bbac add converted/a & toberemoved
+  
+- Build a shamap where the target converted/a is on top of an unrelated
+- change to 'x'. This simulates using convert to merge several repositories
+- together.
+  $ cat >> merge-test2/.hg/shamap <<EOF
+  > $(hg -R merge-test1 log -r 0 -T '{node}') $(hg -R merge-test2 log -r 0 -T '{node}')
+  > $(hg -R merge-test1 log -r 1 -T '{node}') $(hg -R merge-test2 log -r 1 -T '{node}')
+  > EOF
+  $ cat >> merge-test-filemap <<EOF
+  > rename . converted/
+  > EOF
+  $ hg convert --filemap merge-test-filemap merge-test1 merge-test2 --traceback
+  scanning source...
+  sorting...
+  converting...
+  1 add b
+  0 merge a & b
+  $ hg -R merge-test2 manifest -r tip
+  converted/a
+  converted/b
+  x
+  $ hg -R merge-test2 log -G -T '{shortest(node)} {desc}\n{files % "- {file}\n"}\n'
+  o    6eaa merge a & b
+  |\   - converted/a
+  | |  - toberemoved
+  | |
+  | o  2995 add b
+  | |  - converted/b
+  | |
+  @ |  150e add x & remove tobremoved
+  |/   - toberemoved
+  |    - x
+  |
+  o  bbac add converted/a & toberemoved
+     - converted/a
+     - toberemoved
+  
+
--- a/tests/test-convert-git.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-convert-git.t	Tue Oct 20 15:59:10 2015 -0500
@@ -652,6 +652,12 @@
   $ hg -R git-repo6-hg tip -T "{file_dels}\n"
   .hgsub .hgsubstate
 
+skip submodules in the conversion
+
+  $ hg convert -q git-repo6 no-submodules --config convert.git.skipsubmodules=True
+  $ hg -R no-submodules manifest --all
+  .gitmodules-renamed
+
 convert using a different remote prefix
   $ git init git-repo7
   Initialized empty Git repository in $TESTTMP/git-repo7/.git/
@@ -678,6 +684,28 @@
      master                    0:03bf38caa4c6
      origin/master             0:03bf38caa4c6
 
+Run convert when the remote branches have changed
+(there was an old bug where the local convert read branches from the server)
+
+  $ cd git-repo7
+  $ echo a >> a
+  $ git commit -am "move master forward"
+  [master 0c81947] move master forward
+   Author: nottest <test@example.org>
+   1 file changed, 1 insertion(+)
+  $ cd ..
+  $ rm -rf hg-repo7
+  $ hg convert --config convert.git.remoteprefix=origin git-repo7-client hg-repo7
+  initializing destination hg-repo7 repository
+  scanning source...
+  sorting...
+  converting...
+  0 commit a
+  updating bookmarks
+  $ hg -R hg-repo7 bookmarks
+     master                    0:03bf38caa4c6
+     origin/master             0:03bf38caa4c6
+
 damaged git repository tests:
 In case the hard-coded hashes change, the following commands can be used to
 list the hashes and their corresponding types in the repository:
--- a/tests/test-convert-hg-startrev.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-convert-hg-startrev.t	Tue Oct 20 15:59:10 2015 -0500
@@ -201,4 +201,23 @@
   |
   o  0 "0: add a b f" files: a b f
   
-  $ cd ..
+Convert from specified revs
+
+  $ hg convert --rev 3 --rev 2 source multiplerevs
+  initializing destination multiplerevs repository
+  scanning source...
+  sorting...
+  converting...
+  3 0: add a b f
+  2 1: add c, move f to d
+  1 2: copy e from a, change b
+  0 3: change a
+  $ glog multiplerevs
+  o  3 "3: change a" files: a
+  |
+  | o  2 "2: copy e from a, change b" files: b e
+  | |
+  | o  1 "1: add c, move f to d" files: c d f
+  |/
+  o  0 "0: add a b f" files: a b f
+  
--- a/tests/test-convert-splicemap.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-convert-splicemap.t	Tue Oct 20 15:59:10 2015 -0500
@@ -100,9 +100,9 @@
   sorting...
   converting...
   2 addaandd
-  spliced in ['6d4c2037ddc2cb2627ac3a244ecce35283268f8e'] as parents of 527cdedf31fbd5ea708aa14eeecf53d4676f38db
+  spliced in 6d4c2037ddc2cb2627ac3a244ecce35283268f8e as parents of 527cdedf31fbd5ea708aa14eeecf53d4676f38db
   1 changed
-  spliced in ['e55c719b85b60e5102fac26110ba626e7cb6b7dc', '527cdedf31fbd5ea708aa14eeecf53d4676f38db'] as parents of e4ea00df91897da3079a10fab658c1eddba6617b
+  spliced in e55c719b85b60e5102fac26110ba626e7cb6b7dc and 527cdedf31fbd5ea708aa14eeecf53d4676f38db as parents of e4ea00df91897da3079a10fab658c1eddba6617b
   0 adde
   $ glog -R target1
   o  5:16bc847b02aa "adde" files: e
@@ -161,7 +161,7 @@
   2 changea
   1 addb
   0 changeaagain
-  spliced in ['717d54d67e6c31fd75ffef2ff3042bdd98418437', '102a90ea7b4a3361e4082ed620918c261189a36a'] as parents of 7c364e7fa7d70ae525610c016317ed717b519d97
+  spliced in 717d54d67e6c31fd75ffef2ff3042bdd98418437 and 102a90ea7b4a3361e4082ed620918c261189a36a as parents of 7c364e7fa7d70ae525610c016317ed717b519d97
   $ glog -R ordered-hg1
   o    3:4cb04b9afbf2 "changeaagain" files: a
   |\
@@ -199,7 +199,7 @@
   sorting...
   converting...
   0 changeaagain
-  spliced in ['717d54d67e6c31fd75ffef2ff3042bdd98418437', '102a90ea7b4a3361e4082ed620918c261189a36a'] as parents of 7c364e7fa7d70ae525610c016317ed717b519d97
+  spliced in 717d54d67e6c31fd75ffef2ff3042bdd98418437 and 102a90ea7b4a3361e4082ed620918c261189a36a as parents of 7c364e7fa7d70ae525610c016317ed717b519d97
   $ glog -R ordered-hg2
   o    3:4cb04b9afbf2 "changeaagain" files: a
   |\
--- a/tests/test-convert-svn-sink.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-convert-svn-sink.t	Tue Oct 20 15:59:10 2015 -0500
@@ -382,8 +382,7 @@
   3 files updated, 0 files merged, 2 files removed, 0 files unresolved
   $ hg --cwd b merge
   merging b
-  warning: conflicts during merge.
-  merging b incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
   2 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
--- a/tests/test-convert.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-convert.t	Tue Oct 20 15:59:10 2015 -0500
@@ -265,6 +265,9 @@
                     remote refs are converted as bookmarks with
                     "convert.git.remoteprefix" as a prefix followed by a /. The
                     default is 'remote'.
+      convert.git.skipsubmodules
+                    does not convert root level .gitmodules files or files with
+                    160000 mode indicating a submodule. Default is False.
   
       Perforce Source
       ###############
--- a/tests/test-copy-move-merge.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-copy-move-merge.t	Tue Oct 20 15:59:10 2015 -0500
@@ -34,13 +34,13 @@
    preserving a for resolve of b
    preserving a for resolve of c
   removing a
-   b: remote moved from a -> m
-  picked tool 'internal:merge' for b (binary False symlink False)
+   b: remote moved from a -> m (premerge)
+  picked tool ':merge' for b (binary False symlink False)
   merging a and b to b
   my b@add3f11052fa+ other b@17c05bb7fcb6 ancestor a@b8bf91eeebbc
    premerge successful
-   c: remote moved from a -> m
-  picked tool 'internal:merge' for c (binary False symlink False)
+   c: remote moved from a -> m (premerge)
+  picked tool ':merge' for c (binary False symlink False)
   merging a and c to c
   my c@add3f11052fa+ other c@17c05bb7fcb6 ancestor a@b8bf91eeebbc
    premerge successful
@@ -59,4 +59,107 @@
   1
   2
 
+Test disabling copy tracing
+
+- first verify copy metadata was kept
+
+  $ hg up -qC 2
+  $ hg rebase --keep -d 1 -b 2 --config extensions.rebase=
+  rebasing 2:add3f11052fa "other" (tip)
+  merging b and a to b
+  merging c and a to c
+
+  $ cat b
+  0
+  1
+  2
+
+- next verify copy metadata is lost when disabled
+
+  $ hg strip -r . --config extensions.strip=
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  saved backup bundle to $TESTTMP/t/.hg/strip-backup/550bd84c0cd3-fc575957-backup.hg (glob)
+  $ hg up -qC 2
+  $ hg rebase --keep -d 1 -b 2 --config extensions.rebase= --config experimental.disablecopytrace=True
+  rebasing 2:add3f11052fa "other" (tip)
+  remote changed a which local deleted
+  use (c)hanged version or leave (d)eleted? c
+
+  $ cat b
+  1
+  2
+
   $ cd ..
+
+Verify disabling copy tracing still keeps copies from rebase source
+
+  $ hg init copydisable
+  $ cd copydisable
+  $ touch a
+  $ hg ci -Aqm 'add a'
+  $ touch b
+  $ hg ci -Aqm 'add b, c'
+  $ hg cp b x
+  $ echo x >> x
+  $ hg ci -qm 'copy b->x'
+  $ hg up -q 1
+  $ touch z
+  $ hg ci -Aqm 'add z'
+  $ hg log -G -T '{rev} {desc}\n'
+  @  3 add z
+  |
+  | o  2 copy b->x
+  |/
+  o  1 add b, c
+  |
+  o  0 add a
+  
+  $ hg rebase -d . -b 2 --config extensions.rebase= --config experimental.disablecopytrace=True
+  rebasing 2:6adcf8c12e7d "copy b->x"
+  saved backup bundle to $TESTTMP/copydisable/.hg/strip-backup/6adcf8c12e7d-ce4b3e75-backup.hg (glob)
+  $ hg up -q 3
+  $ hg log -f x -T '{rev} {desc}\n'
+  3 copy b->x
+  1 add b, c
+
+  $ cd ../
+
+Verify we duplicate existing copies, instead of detecting them
+
+  $ hg init copydisable3
+  $ cd copydisable3
+  $ touch a
+  $ hg ci -Aqm 'add a'
+  $ hg cp a b
+  $ hg ci -Aqm 'copy a->b'
+  $ hg mv b c
+  $ hg ci -Aqm 'move b->c'
+  $ hg up -q 0
+  $ hg cp a b
+  $ echo b >> b
+  $ hg ci -Aqm 'copy a->b (2)'
+  $ hg log -G -T '{rev} {desc}\n'
+  @  3 copy a->b (2)
+  |
+  | o  2 move b->c
+  | |
+  | o  1 copy a->b
+  |/
+  o  0 add a
+  
+  $ hg rebase -d 2 -s 3 --config extensions.rebase= --config experimental.disablecopytrace=True
+  rebasing 3:47e1a9e6273b "copy a->b (2)" (tip)
+  saved backup bundle to $TESTTMP/copydisable3/.hg/strip-backup/47e1a9e6273b-2d099c59-backup.hg (glob)
+
+  $ hg log -G -f b
+  @  changeset:   3:76024fb4b05b
+  |  tag:         tip
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     copy a->b (2)
+  |
+  o  changeset:   0:ac82d8b1f7c4
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     add a
+  
--- a/tests/test-custom-filters.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-custom-filters.t	Tue Oct 20 15:59:10 2015 -0500
@@ -10,11 +10,11 @@
   > EOF
 
   $ cat > prefix.py <<EOF
-  > from mercurial import util
+  > from mercurial import error
   > def stripprefix(s, cmd, filename, **kwargs):
   >     header = '%s\n' % cmd
   >     if s[:len(header)] != header:
-  >         raise util.Abort('missing header "%s" in %s' % (cmd, filename))
+  >         raise error.Abort('missing header "%s" in %s' % (cmd, filename))
   >     return s[len(header):]
   > def insertprefix(s, cmd):
   >     return '%s\n%s' % (cmd, s)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-debian-packages.t	Tue Oct 20 15:59:10 2015 -0500
@@ -0,0 +1,20 @@
+#require test-repo slow debhelper
+
+Ensure debuild doesn't run the testsuite, as that could get silly.
+  $ DEB_BUILD_OPTIONS=nocheck
+  $ export DEB_BUILD_OPTIONS
+  $ OUTPUTDIR=`pwd`
+  $ export OUTPUTDIR
+
+  $ cd "$TESTDIR"/..
+  $ make deb > $OUTPUTDIR/build.log 2>&1
+  $ cd $OUTPUTDIR
+  $ ls *.deb
+  mercurial-common_*.deb (glob)
+  mercurial_*.deb (glob)
+main deb should have .so but no .py
+  $ dpkg --contents mercurial_*.deb | egrep '(localrepo|parsers)'
+  * ./usr/lib/python2.7/dist-packages/mercurial/parsers*.so (glob)
+mercurial-common should have .py but no .so or .pyc
+  $ dpkg --contents mercurial-common_*.deb | egrep '(localrepo|parsers)'
+  * ./usr/lib/python2.7/dist-packages/mercurial/localrepo.py (glob)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-debugextensions.t	Tue Oct 20 15:59:10 2015 -0500
@@ -0,0 +1,83 @@
+  $ hg debugextensions
+
+  $ debugpath=`pwd`/extwithoutinfos.py
+
+  $ cat > extwithoutinfos.py <<EOF
+  > EOF
+
+  $ cat >> $HGRCPATH <<EOF
+  > [extensions]
+  > color=
+  > histedit=
+  > patchbomb=
+  > rebase=
+  > mq=
+  > ext1 = $debugpath
+  > EOF
+
+  $ hg debugextensions
+  color
+  ext1 (untested!)
+  histedit
+  mq
+  patchbomb
+  rebase
+
+  $ hg debugextensions -v
+  color
+    location: */hgext/color.pyc (glob)
+    tested with: internal
+  ext1
+    location: */extwithoutinfos.pyc (glob)
+  histedit
+    location: */hgext/histedit.pyc (glob)
+    tested with: internal
+  mq
+    location: */hgext/mq.pyc (glob)
+    tested with: internal
+  patchbomb
+    location: */hgext/patchbomb.pyc (glob)
+    tested with: internal
+  rebase
+    location: */hgext/rebase.pyc (glob)
+    tested with: internal
+
+  $ hg debugextensions -Tjson | sed 's|\\\\|/|g'
+  [
+   {
+    "buglink": "",
+    "name": "color",
+    "source": "*/hgext/color.pyc", (glob)
+    "testedwith": "internal"
+   },
+   {
+    "buglink": "",
+    "name": "ext1",
+    "source": "*/extwithoutinfos.pyc", (glob)
+    "testedwith": ""
+   },
+   {
+    "buglink": "",
+    "name": "histedit",
+    "source": "*/hgext/histedit.pyc", (glob)
+    "testedwith": "internal"
+   },
+   {
+    "buglink": "",
+    "name": "mq",
+    "source": "*/hgext/mq.pyc", (glob)
+    "testedwith": "internal"
+   },
+   {
+    "buglink": "",
+    "name": "patchbomb",
+    "source": "*/hgext/patchbomb.pyc", (glob)
+    "testedwith": "internal"
+   },
+   {
+    "buglink": "",
+    "name": "rebase",
+    "source": "*/hgext/rebase.pyc", (glob)
+    "testedwith": "internal"
+   }
+  ]
--- a/tests/test-default-push.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-default-push.t	Tue Oct 20 15:59:10 2015 -0500
@@ -18,7 +18,6 @@
 Push should provide a hint when both 'default' and 'default-push' not set:
   $ cd c
   $ hg push --config paths.default=
-  pushing to default-push
   abort: default repository not configured!
   (see the "path" section in "hg help config")
   [255]
@@ -46,3 +45,9 @@
   adding manifests
   adding file changes
   added 1 changesets with 1 changes to 1 files
+
+Pushing to a path that isn't defined should not fall back to default
+
+  $ hg --cwd b push doesnotexist
+  abort: repository doesnotexist does not exist!
+  [255]
--- a/tests/test-diff-change.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-diff-change.t	Tue Oct 20 15:59:10 2015 -0500
@@ -29,15 +29,59 @@
   -first
   +second
 
-Test dumb revspecs (issue3474)
+  $ cd ..
+
+Test dumb revspecs: top-level "x:y", "x:", ":y" and ":" ranges should be handled
+as pairs even if x == y, but not for "f(x:y)" nor "x::y" (issue3474, issue4774)
+
+  $ hg clone -q a dumbspec
+  $ cd dumbspec
+  $ echo "wdir" > file.txt
 
   $ hg diff -r 2:2
+  $ hg diff -r 2:.
+  $ hg diff -r 2:
+  $ hg diff -r :0
+  $ hg diff -r '2:first(2:2)'
+  $ hg diff -r 'first(2:2)' --nodates
+  diff -r bf5ff72eb7e0 file.txt
+  --- a/file.txt
+  +++ b/file.txt
+  @@ -1,1 +1,1 @@
+  -third
+  +wdir
+  $ hg diff -r 2::2 --nodates
+  diff -r bf5ff72eb7e0 file.txt
+  --- a/file.txt
+  +++ b/file.txt
+  @@ -1,1 +1,1 @@
+  -third
+  +wdir
   $ hg diff -r "2 and 1"
   abort: empty revision range
   [255]
 
+  $ cd ..
+
+  $ hg clone -qr0 a dumbspec-rev0
+  $ cd dumbspec-rev0
+  $ echo "wdir" > file.txt
+
+  $ hg diff -r :
+  $ hg diff -r 'first(:)' --nodates
+  diff -r 4bb65dda5db4 file.txt
+  --- a/file.txt
+  +++ b/file.txt
+  @@ -1,1 +1,1 @@
+  -first
+  +wdir
+
+  $ cd ..
+
 Testing diff --change when merge:
 
+  $ cd a
+
   $ for i in 1 2 3 4 5 6 7 8 9 10; do
   >    echo $i >> file.txt
   > done
--- a/tests/test-dirstate.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-dirstate.t	Tue Oct 20 15:59:10 2015 -0500
@@ -66,10 +66,10 @@
 coherent (issue4353)
 
   $ cat > ../dirstateexception.py <<EOF
-  > from mercurial import merge, extensions, util
+  > from mercurial import merge, extensions, error
   > 
   > def wraprecordupdates(orig, repo, actions, branchmerge):
-  >     raise util.Abort("simulated error while recording dirstateupdates")
+  >     raise error.Abort("simulated error while recording dirstateupdates")
   > 
   > def reposetup(ui, repo):
   >     extensions.wrapfunction(merge, 'recordupdates', wraprecordupdates)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-docker-packaging.t	Tue Oct 20 15:59:10 2015 -0500
@@ -0,0 +1,27 @@
+#require test-repo slow docker
+
+Ensure debuild doesn't run the testsuite, as that could get silly.
+  $ DEB_BUILD_OPTIONS=nocheck
+  $ export DEB_BUILD_OPTIONS
+  $ OUTPUTDIR=`pwd`
+  $ export OUTPUTDIR
+
+  $ cd "$TESTDIR"/..
+  $ make docker-debian-jessie > $OUTPUTDIR/build.log 2>&1
+  $ cd $OUTPUTDIR
+  $ ls *.deb
+  mercurial-common_*.deb (glob)
+  mercurial_*.deb (glob)
+
+We check debian package contents with portable tools so that when
+we're on non-debian machines we can still test the packages that are
+built using docker.
+
+main deb should have .so but no .py
+  $ ar x mercurial_*.deb
+  $ tar tf data.tar* | egrep '(localrepo|parsers)'
+  ./usr/lib/python2.7/dist-packages/mercurial/parsers*.so (glob)
+mercurial-common should have .py but no .so or .pyc
+  $ ar x mercurial-common_*.deb
+  $ tar tf data.tar* | egrep '(localrepo|parsers)'
+  ./usr/lib/python2.7/dist-packages/mercurial/localrepo.py
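
Outside the test harness the same portable inspection works by hand; a rough sketch, assuming the docker build left the .deb files in the current directory:

  $ ar x mercurial-common_*.deb                      # unpack the Debian archive members
  $ tar tf data.tar.* | grep 'mercurial/localrepo'   # pure-Python payload should show up here
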
--- a/tests/test-double-merge.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-double-merge.t	Tue Oct 20 15:59:10 2015 -0500
@@ -37,13 +37,13 @@
    ancestor: e6dc8efe11cc, local: 6a0df1dad128+, remote: 484bf6903104
    preserving foo for resolve of bar
    preserving foo for resolve of foo
-   bar: remote copied from foo -> m
-  picked tool 'internal:merge' for bar (binary False symlink False)
+   bar: remote copied from foo -> m (premerge)
+  picked tool ':merge' for bar (binary False symlink False)
   merging foo and bar to bar
   my bar@6a0df1dad128+ other bar@484bf6903104 ancestor foo@e6dc8efe11cc
    premerge successful
-   foo: versions differ -> m
-  picked tool 'internal:merge' for foo (binary False symlink False)
+   foo: versions differ -> m (premerge)
+  picked tool ':merge' for foo (binary False symlink False)
   merging foo
   my foo@6a0df1dad128+ other foo@484bf6903104 ancestor foo@e6dc8efe11cc
    premerge successful
--- a/tests/test-eol-tag.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-eol-tag.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,4 +1,4 @@
-http://mercurial.selenic.com/bts/issue2493
+https://bz.mercurial-scm.org/2493
 
 Testing tagging with the EOL extension
 
--- a/tests/test-eolfilename.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-eolfilename.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,6 +1,6 @@
 #require eol-in-paths
 
-http://mercurial.selenic.com/bts/issue352
+https://bz.mercurial-scm.org/352
 
 test issue352
 
@@ -51,7 +51,7 @@
   abort: '\n' and '\r' disallowed in filenames: 'quick\rfox'
   [255]
 
-http://mercurial.selenic.com/bts/issue2036
+https://bz.mercurial-scm.org/2036
 
   $ cd ..
 
--- a/tests/test-extdiff.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-extdiff.t	Tue Oct 20 15:59:10 2015 -0500
@@ -46,6 +46,7 @@
    -o --option OPT [+]      pass option to comparison program
    -r --rev REV [+]         revision
    -c --change REV          change made by revision
+      --patch               compare patches for two revisions
    -I --include PATTERN [+] include names matching the given patterns
    -X --exclude PATTERN [+] exclude names matching the given patterns
    -S --subrepos            recurse into subrepositories
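
The new --patch flag listed above makes extdiff compare the patches (export-style output) of two revisions rather than snapshots of their file trees; a hedged invocation sketch, with illustrative revisions:

  $ hg extdiff --config extensions.extdiff= --patch -r 1.0 -r tip
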
--- a/tests/test-extension.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-extension.t	Tue Oct 20 15:59:10 2015 -0500
@@ -115,8 +115,6 @@
   3) bar extsetup
   4) foo reposetup
   4) bar reposetup
-  4) foo reposetup
-  4) bar reposetup
 
   $ echo 'foo = !' >> $HGRCPATH
   $ echo 'bar = !' >> $HGRCPATH
@@ -289,17 +287,23 @@
   $ echo "debugextension = $debugpath" >> $HGRCPATH
 
   $ hg help debugextension
-  debugextension extension - only debugcommands
+  hg debugextensions
+  
+  show information about active extensions
   
-  no commands defined
+  options:
+  
+  (some details hidden, use --verbose to show complete help)
 
 
   $ hg --verbose help debugextension
-  debugextension extension - only debugcommands
+  hg debugextensions
+  
+  show information about active extensions
   
-  list of commands:
+  options:
   
-   foo           yet another foo command
+   -T --template TEMPLATE display with template (EXPERIMENTAL)
   
   global options ([+] can be repeated):
   
@@ -328,12 +332,13 @@
 
 
   $ hg --debug help debugextension
-  debugextension extension - only debugcommands
+  hg debugextensions
+  
+  show information about active extensions
   
-  list of commands:
+  options:
   
-   debugfoobar   yet another debug command
-   foo           yet another foo command
+   -T --template TEMPLATE display with template (EXPERIMENTAL)
   
   global options ([+] can be repeated):
   
@@ -392,6 +397,7 @@
    -o --option OPT [+]      pass option to comparison program
    -r --rev REV [+]         revision
    -c --change REV          change made by revision
+      --patch               compare patches for two revisions
    -I --include PATTERN [+] include names matching the given patterns
    -X --exclude PATTERN [+] exclude names matching the given patterns
    -S --subrepos            recurse into subrepositories
@@ -546,20 +552,7 @@
 
 Issue811: Problem loading extensions twice (by site and by user)
 
-  $ debugpath=`pwd`/debugissue811.py
-  $ cat > debugissue811.py <<EOF
-  > '''show all loaded extensions
-  > '''
-  > from mercurial import cmdutil, commands, extensions
-  > cmdtable = {}
-  > command = cmdutil.command(cmdtable)
-  > @command('debugextensions', [], 'hg debugextensions', norepo=True)
-  > def debugextensions(ui):
-  >     "yet another debug command"
-  >     ui.write("%s\n" % '\n'.join([x for x, y in extensions.extensions()]))
-  > EOF
   $ cat <<EOF >> $HGRCPATH
-  > debugissue811 = $debugpath
   > mq =
   > strip =
   > hgext.mq =
@@ -570,9 +563,8 @@
 (note that mq force load strip, also checking it's not loaded twice)
 
   $ hg debugextensions
-  debugissue811
+  mq
   strip
-  mq
 
 For extensions, which name matches one of its commands, help
 message should ask '-v -e' to get list of built-in aliases
@@ -944,6 +936,15 @@
   ** Mercurial Distributed SCM (version 1.9.3)
   ** Extensions loaded: throw, older
 
+Ability to point to a different point
+  $ hg --config extensions.throw=throw.py --config extensions.older=older.py \
+  >   --config ui.supportcontact='Your Local Goat Lenders' throw 2>&1 | egrep '^\*\*'
+  ** unknown exception encountered, please report by visiting
+  ** Your Local Goat Lenders
+  ** Python * (glob)
+  ** Mercurial Distributed SCM (*) (glob)
+  ** Extensions loaded: throw, older
+
 Declare the version as supporting this hg version, show regular bts link:
   $ hgver=`$PYTHON -c 'from mercurial import util; print util.version().split("+")[0]'`
   $ echo 'testedwith = """'"$hgver"'"""' >> throw.py
@@ -953,7 +954,7 @@
   $ rm -f throw.pyc throw.pyo
   $ hg --config extensions.throw=throw.py throw 2>&1 | egrep '^\*\*'
   ** unknown exception encountered, please report by visiting
-  ** http://mercurial.selenic.com/wiki/BugTracker
+  ** https://mercurial-scm.org/wiki/BugTracker
   ** Python * (glob)
   ** Mercurial Distributed SCM (*) (glob)
   ** Extensions loaded: throw
@@ -964,7 +965,7 @@
   $ rm -f throw.pyc throw.pyo
   $ hg --config extensions.throw=throw.py throw 2>&1 | egrep '^\*\*'
   ** unknown exception encountered, please report by visiting
-  ** http://mercurial.selenic.com/wiki/BugTracker
+  ** https://mercurial-scm.org/wiki/BugTracker
   ** Python * (glob)
   ** Mercurial Distributed SCM (*) (glob)
   ** Extensions loaded: throw
@@ -974,7 +975,7 @@
   $ rm -f throw.pyc throw.pyo
   $ hg version -v
   Mercurial Distributed SCM (version *) (glob)
-  (see http://mercurial.selenic.com for more information)
+  (see https://mercurial-scm.org for more information)
   
   Copyright (C) 2005-* Matt Mackall and others (glob)
   This is free software; see the source for copying conditions. There is NO
@@ -985,7 +986,7 @@
 
   $ hg version -v --config extensions.throw=throw.py
   Mercurial Distributed SCM (version *) (glob)
-  (see http://mercurial.selenic.com for more information)
+  (see https://mercurial-scm.org for more information)
   
   Copyright (C) 2005-* Matt Mackall and others (glob)
   This is free software; see the source for copying conditions. There is NO
@@ -998,7 +999,7 @@
   $ rm -f throw.pyc throw.pyo
   $ hg version -v --config extensions.throw=throw.py
   Mercurial Distributed SCM (version *) (glob)
-  (see http://mercurial.selenic.com for more information)
+  (see https://mercurial-scm.org for more information)
   
   Copyright (C) 2005-* Matt Mackall and others (glob)
   This is free software; see the source for copying conditions. There is NO
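
The ui.supportcontact knob exercised above replaces the default bug-tracker pointer in crash reports; a minimal sketch of setting it persistently, with an illustrative contact string:

  $ cat >> $HGRCPATH <<EOF
  > [ui]
  > supportcontact = Your Local Support Team
  > EOF
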
--- a/tests/test-filecache.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-filecache.py	Tue Oct 20 15:59:10 2015 -0500
@@ -130,7 +130,7 @@
     util.cachestat.__init__ = originit
 
 def test_filecache_synced():
-    # test old behaviour that caused filecached properties to go out of sync
+    # test old behavior that caused filecached properties to go out of sync
     os.system('hg init && echo a >> a && hg ci -qAm.')
     repo = hg.repository(ui.ui())
     # first rollback clears the filecache, but changelog to stays in __dict__
--- a/tests/test-filelog.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-filelog.py	Tue Oct 20 15:59:10 2015 -0500
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 """
-Tests the behaviour of filelog w.r.t. data starting with '\1\n'
+Tests the behavior of filelog w.r.t. data starting with '\1\n'
 """
 from mercurial import ui, hg
 from mercurial.node import nullid, hex
--- a/tests/test-fileset.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-fileset.t	Tue Oct 20 15:59:10 2015 -0500
@@ -49,6 +49,9 @@
   $ fileset 'a* - a1'
   a2
   $ fileset 'a_b'
+  $ fileset '"\xy"'
+  hg: parse error: invalid \x escape
+  [255]
 
 Test files status
 
@@ -159,9 +162,8 @@
   $ fileset 'unresolved()'
   $ hg merge
   merging b2
-  warning: conflicts during merge.
-  merging b2 incomplete! (edit conflicts, then use 'hg resolve --mark')
-  * files updated, 0 files merged, * files removed, 1 files unresolved (glob)
+  warning: conflicts while merging b2! (edit, then use 'hg resolve --mark')
+  6 files updated, 0 files merged, 1 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
   $ fileset 'resolved()'
--- a/tests/test-fncache.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-fncache.t	Tue Oct 20 15:59:10 2015 -0500
@@ -202,13 +202,13 @@
 
   $ cat > exceptionext.py <<EOF
   > import os
-  > from mercurial import commands, util
+  > from mercurial import commands, error
   > from mercurial.extensions import wrapfunction
   > 
-  > def lockexception(orig, vfs, lockname, wait, releasefn, acquirefn, desc):
+  > def lockexception(orig, vfs, lockname, wait, releasefn, *args, **kwargs):
   >     def releasewrap():
-  >         raise util.Abort("forced lock failure")
-  >     return orig(vfs, lockname, wait, releasewrap, acquirefn, desc)
+  >         raise error.Abort("forced lock failure")
+  >     return orig(vfs, lockname, wait, releasewrap, *args, **kwargs)
   > 
   > def reposetup(ui, repo):
   >     wrapfunction(repo, '_lock', lockexception)
@@ -231,13 +231,13 @@
 
   $ cat > ../exceptionext.py <<EOF
   > import os
-  > from mercurial import commands, util, localrepo
+  > from mercurial import commands, error, localrepo
   > from mercurial.extensions import wrapfunction
   > 
   > def wrapper(orig, self, *args, **kwargs):
   >     tr = orig(self, *args, **kwargs)
   >     def fail(tr):
-  >         raise util.Abort("forced transaction failure")
+  >         raise error.Abort("forced transaction failure")
   >     # zzz prefix to ensure it sorted after store.write
   >     tr.addfinalize('zzz-forcefails', fail)
   >     return tr
@@ -262,19 +262,19 @@
 
   $ cat > ../exceptionext.py <<EOF
   > import os
-  > from mercurial import commands, util, transaction, localrepo
+  > from mercurial import commands, error, transaction, localrepo
   > from mercurial.extensions import wrapfunction
   > 
   > def trwrapper(orig, self, *args, **kwargs):
   >     tr = orig(self, *args, **kwargs)
   >     def fail(tr):
-  >         raise util.Abort("forced transaction failure")
+  >         raise error.Abort("forced transaction failure")
   >     # zzz prefix to ensure it sorted after store.write
   >     tr.addfinalize('zzz-forcefails', fail)
   >     return tr
   > 
   > def abortwrapper(orig, self, *args, **kwargs):
-  >     raise util.Abort("forced transaction failure")
+  >     raise error.Abort("forced transaction failure")
   > 
   > def uisetup(ui):
   >     wrapfunction(localrepo.localrepository, 'transaction', trwrapper)
--- a/tests/test-generaldelta.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-generaldelta.t	Tue Oct 20 15:59:10 2015 -0500
@@ -69,3 +69,46 @@
      rev    offset  length   base linkrev nodeid       p1           p2
        0         0       3      0       1 1406e7411862 000000000000 000000000000
 
+  $ cd ..
+
+Test format.aggressivemergedeltas
+
+  $ hg init --config format.generaldelta=1 aggressive
+  $ cd aggressive
+  $ touch a b c d e
+  $ hg commit -Aqm side1
+  $ hg up -q null
+  $ touch x y
+  $ hg commit -Aqm side2
+
+- Verify non-aggressive merge uses p1 (commit 1) as delta parent
+  $ hg merge -q 0
+  $ hg commit -q -m merge
+  $ hg debugindex -m
+     rev    offset  length  delta linkrev nodeid       p1           p2
+       0         0      59     -1       0 8dde941edb6e 000000000000 000000000000
+       1        59      59     -1       1 315c023f341d 000000000000 000000000000
+       2       118      65      1       2 2ab389a983eb 315c023f341d 8dde941edb6e
+
+  $ hg strip -q -r . --config extensions.strip=
+
+- Verify aggressive merge uses p2 (commit 0) as delta parent
+  $ hg up -q -C 1
+  $ hg merge -q 0
+  $ hg commit -q -m merge --config format.aggressivemergedeltas=True
+  $ hg debugindex -m
+     rev    offset  length  delta linkrev nodeid       p1           p2
+       0         0      59     -1       0 8dde941edb6e 000000000000 000000000000
+       1        59      59     -1       1 315c023f341d 000000000000 000000000000
+       2       118      62      0       2 2ab389a983eb 315c023f341d 8dde941edb6e
+
+Test that strip bundle use bundle2
+  $ hg --config extensions.strip= strip .
+  0 files updated, 0 files merged, 5 files removed, 0 files unresolved
+  saved backup bundle to $TESTTMP/aggressive/.hg/strip-backup/1c5d4dc9a8b8-6c68e60c-backup.hg (glob)
+  $ hg debugbundle .hg/strip-backup/*
+  Stream params: {'Compression': 'BZ'}
+  changegroup -- "{'version': '02'}"
+      1c5d4dc9a8b8d6e1750966d343e94db665e7a1e9
+
+  $ cd ..
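
The per-command --config flags used above have persistent equivalents; a rough sketch, noting that generaldelta only affects repositories created after it is set, while aggressivemergedeltas influences how merge revisions are delta'd when they are written:

  $ cat >> $HGRCPATH <<EOF
  > [format]
  > generaldelta = True
  > aggressivemergedeltas = True
  > EOF
  $ hg init aggressive2   # new repositories pick up the format options
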
--- a/tests/test-graft.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-graft.t	Tue Oct 20 15:59:10 2015 -0500
@@ -153,8 +153,8 @@
    branchmerge: True, force: True, partial: False
    ancestor: 68795b066622, local: ef0ef43d49e7+, remote: 5d205f8b35b6
    preserving b for resolve of b
-   b: local copied/moved from a -> m
-  picked tool 'internal:merge' for b (binary False symlink False)
+   b: local copied/moved from a -> m (premerge)
+  picked tool ':merge' for b (binary False symlink False)
   merging b and a to b
   my b@ef0ef43d49e7+ other a@5d205f8b35b6 ancestor a@68795b066622
    premerge successful
@@ -183,12 +183,14 @@
    d: remote is newer -> g
   getting d
    b: remote unchanged -> k
-   e: versions differ -> m
-  picked tool 'internal:merge' for e (binary False symlink False)
+   e: versions differ -> m (premerge)
+  picked tool ':merge' for e (binary False symlink False)
   merging e
   my e@1905859650ec+ other e@9c233e8e184d ancestor e@68795b066622
-  warning: conflicts during merge.
-  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
+   e: versions differ -> m (merge)
+  picked tool ':merge' for e (binary False symlink False)
+  my e@1905859650ec+ other e@9c233e8e184d ancestor e@68795b066622
+  warning: conflicts while merging e! (edit, then use 'hg resolve --mark')
   abort: unresolved conflicts, can't continue
   (use hg resolve and hg graft --continue)
   [255]
@@ -220,8 +222,7 @@
   skipping revision 5:97f8bfe72746 (already grafted to 9:1905859650ec)
   grafting 4:9c233e8e184d "4"
   merging e
-  warning: conflicts during merge.
-  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging e! (edit, then use 'hg resolve --mark')
   abort: unresolved conflicts, can't continue
   (use hg resolve and hg graft --continue)
   [255]
@@ -332,6 +333,54 @@
   skipping already grafted revision 7:ef0ef43d49e7 (was grafted from 2:5c095ad7e90f)
   [255]
 
+  $ hg extdiff --config extensions.extdiff= --patch -r 2 -r 13
+  --- */hg-5c095ad7e90f.patch	* +0000 (glob)
+  +++ */hg-7a4785234d87.patch	* +0000 (glob)
+  @@ -1,18 +1,18 @@
+   # HG changeset patch
+  -# User test
+  +# User foo
+   # Date 0 0
+   #      Thu Jan 01 00:00:00 1970 +0000
+  -# Node ID 5c095ad7e90f871700f02dd1fa5012cb4498a2d4
+  -# Parent  5d205f8b35b66bc36375c9534ffd3237730e8f04
+  +# Node ID 7a4785234d87ec1aa420ed6b11afe40fa73e12a9
+  +# Parent  b592ea63bb0c19a6c5c44685ee29a2284f9f1b8f
+   2
+   
+  -diff -r 5d205f8b35b6 -r 5c095ad7e90f a
+  +diff -r b592ea63bb0c -r 7a4785234d87 a
+   --- a/a	Thu Jan 01 00:00:00 1970 +0000
+   +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
+   @@ -1,1 +0,0 @@
+  --b
+  -diff -r 5d205f8b35b6 -r 5c095ad7e90f b
+  +-a
+  +diff -r b592ea63bb0c -r 7a4785234d87 b
+   --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+   +++ b/b	Thu Jan 01 00:00:00 1970 +0000
+   @@ -0,0 +1,1 @@
+  -+b
+  ++a
+  [1]
+
+  $ hg extdiff --config extensions.extdiff= --patch -r 2 -r 13 -X .
+  --- */hg-5c095ad7e90f.patch	* +0000 (glob)
+  +++ */hg-7a4785234d87.patch	* +0000 (glob)
+  @@ -1,8 +1,8 @@
+   # HG changeset patch
+  -# User test
+  +# User foo
+   # Date 0 0
+   #      Thu Jan 01 00:00:00 1970 +0000
+  -# Node ID 5c095ad7e90f871700f02dd1fa5012cb4498a2d4
+  -# Parent  5d205f8b35b66bc36375c9534ffd3237730e8f04
+  +# Node ID 7a4785234d87ec1aa420ed6b11afe40fa73e12a9
+  +# Parent  b592ea63bb0c19a6c5c44685ee29a2284f9f1b8f
+   2
+   
+  [1]
+
 Disallow grafting already grafted csets with the same origin onto each other
   $ hg up -q 13
   $ hg graft 2
@@ -373,8 +422,7 @@
   [255]
   $ hg resolve --all
   merging a
-  warning: conflicts during merge.
-  merging a incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   [1]
   $ cat a
   <<<<<<< local: aaa4406d4f0a - test: 9
@@ -699,8 +747,7 @@
   [255]
   $ hg resolve --all
   merging a
-  warning: conflicts during merge.
-  merging a incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   [1]
   $ echo abc > a
   $ hg resolve -m a
--- a/tests/test-help.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-help.t	Tue Oct 20 15:59:10 2015 -0500
@@ -249,6 +249,8 @@
        bugzilla      hooks for integrating with the Bugzilla bug tracker
        censor        erase file content at a given revision
        churn         command to display statistics about repository history
+       clonebundles  advertise pre-generated bundles to seed clones
+                     (experimental)
        color         colorize output from some commands
        convert       import revisions from foreign VCS repositories into
                      Mercurial
@@ -278,6 +280,11 @@
        transplant    command to transplant changesets from another branch
        win32mbcs     allow the use of MBCS paths with problematic encodings
        zeroconf      discover and advertise repositories on the local network
+
+Verify that extension keywords appear in help templates
+
+  $ hg help --config extensions.transplant= templating|grep transplant > /dev/null
+
 Test short command list with verbose option
 
   $ hg -v help shortlist
@@ -411,7 +418,7 @@
 
   $ hg add -h --version
   Mercurial Distributed SCM (version *) (glob)
-  (see http://mercurial.selenic.com for more information)
+  (see https://mercurial-scm.org for more information)
   
   Copyright (C) 2005-2015 Matt Mackall and others
   This is free software; see the source for copying conditions. There is NO
@@ -457,7 +464,7 @@
       manifest, and tracked files, as well as the integrity of their crosslinks
       and indices.
   
-      Please see http://mercurial.selenic.com/wiki/RepositoryCorruption for more
+      Please see https://mercurial-scm.org/wiki/RepositoryCorruption for more
       information about recovery from corruption of the repository.
   
       Returns 0 on success, 1 if errors are encountered.
@@ -616,6 +623,23 @@
   [255]
 
 
+Make sure that we don't run afoul of the help system thinking that
+this is a section and erroring out weirdly.
+
+  $ hg .log
+  hg: unknown command '.log'
+  (did you mean one of log?)
+  [255]
+
+  $ hg log.
+  hg: unknown command 'log.'
+  (did you mean one of log?)
+  [255]
+  $ hg pu.lh
+  hg: unknown command 'pu.lh'
+  (did you mean one of pull, push?)
+  [255]
+
   $ cat > helpext.py <<EOF
   > import os
   > from mercurial import cmdutil, commands
@@ -629,8 +653,8 @@
   >     ('', 'newline', '', 'line1\nline2')],
   >     'hg nohelp',
   >     norepo=True)
-  > @command('debugoptDEP', [('', 'dopt', None, 'option is DEPRECATED')])
-  > @command('debugoptEXP', [('', 'eopt', None, 'option is EXPERIMENTAL')])
+  > @command('debugoptDEP', [('', 'dopt', None, 'option is (DEPRECATED)')])
+  > @command('debugoptEXP', [('', 'eopt', None, 'option is (EXPERIMENTAL)')])
   > def nohelp(ui, *args, **kwargs):
   >     pass
   > 
@@ -757,6 +781,8 @@
   
    debugancestor
                  find the ancestor revision of two revisions in a given index
+   debugapplystreamclonebundle
+                 apply a stream clone bundle file
    debugbuilddag
                  builds a repo with a given DAG from scratch in the current
                  empty repo
@@ -767,6 +793,8 @@
                  list all available commands and options
    debugcomplete
                  returns the completion list associated with the given command
+   debugcreatestreamclonebundle
+                 create a stream clone bundle file
    debugdag      format the changelog or an index DAG as a concise textual
                  description
    debugdata     dump the contents of a data file revision
@@ -775,6 +803,8 @@
                  show the contents of the current dirstate
    debugdiscovery
                  runs the changeset discovery protocol in isolation
+   debugextensions
+                 show information about active extensions
    debugfileset  parse and apply a fileset specification
    debugfsinfo   show information detected about current filesystem
    debuggetbundle
@@ -786,6 +816,8 @@
    debuginstall  test Mercurial installation
    debugknown    test whether node ids are known to a repo
    debuglocks    show or modify state of locks
+   debugmergestate
+                 print merge state
    debugnamecomplete
                  complete "names" - tags, open branch names, bookmark names
    debugobsolete
@@ -849,9 +881,9 @@
 
 test deprecated and experimental options is shown with -v
   $ hg help -v debugoptDEP | grep dopt
-    --dopt option is DEPRECATED
+    --dopt option is (DEPRECATED)
   $ hg help -v debugoptEXP | grep eopt
-    --eopt option is EXPERIMENTAL
+    --eopt option is (EXPERIMENTAL)
 
 #if gettext
 test deprecated option is hidden with translation with untranslated description
@@ -912,6 +944,47 @@
       working directory is checked out, it is equivalent to null. If an
       uncommitted merge is in progress, "." is the revision of the first parent.
 
+Test repeated config section name
+
+  $ hg help config.host
+      "http_proxy.host"
+          Host name and (optional) port of the proxy server, for example
+          "myproxy:8000".
+  
+      "smtp.host"
+          Host name of mail server, e.g. "mail.example.com".
+  
+Unrelated trailing paragraphs shouldn't be included
+
+  $ hg help config.extramsg | grep '^$'
+  
+
+Test capitalized section name
+
+  $ hg help scripting.HGPLAIN > /dev/null
+
+Help subsection:
+
+  $ hg help config.charsets |grep "Email example:" > /dev/null
+  [1]
+
+Show nested definitions
+("profiling.type"[break]"ls"[break]"stat"[break])
+
+  $ hg help config.type | egrep '^$'|wc -l
+  \s*3 (re)
+
+Last item in help config.*:
+
+  $ hg help config.`hg help config|grep '^    "'| \
+  >       tail -1|sed 's![ "]*!!g'`| \
+  >   grep "hg help -c config" > /dev/null
+  [1]
+
+note to use help -c for general hg help config:
+
+  $ hg help config |grep "hg help -c config" > /dev/null
+
 Test templating help
 
   $ hg help templating | egrep '(desc|diffstat|firstline|nonempty)  '
@@ -920,12 +993,18 @@
       firstline     Any text. Returns the first line of text.
       nonempty      Any text. Returns '(none)' if the string is empty.
 
+Test deprecated items
+
+  $ hg help -v templating | grep currentbookmark
+      currentbookmark
+  $ hg help templating | (grep currentbookmark || true)
+
 Test help hooks
 
   $ cat > helphook1.py <<EOF
   > from mercurial import help
   > 
-  > def rewrite(topic, doc):
+  > def rewrite(ui, topic, doc):
   >     return doc + '\nhelphook1\n'
   > 
   > def extsetup(ui):
@@ -934,7 +1013,7 @@
   $ cat > helphook2.py <<EOF
   > from mercurial import help
   > 
-  > def rewrite(topic, doc):
+  > def rewrite(ui, topic, doc):
   >     return doc + '\nhelphook2\n'
   > 
   > def extsetup(ui):
@@ -947,6 +1026,28 @@
       helphook1
       helphook2
 
+Test -e / -c / -k combinations
+
+  $ hg help -c progress
+  abort: no such help topic: progress
+  (try "hg help --keyword progress")
+  [255]
+  $ hg help -e progress |head -1
+  progress extension - show progress bars for some actions (DEPRECATED)
+  $ hg help -c -k dates |egrep '^(Topics|Extensions|Commands):'
+  Commands:
+  $ hg help -e -k a |egrep '^(Topics|Extensions|Commands):'
+  Extensions:
+  $ hg help -e -c -k date |egrep '^(Topics|Extensions|Commands):'
+  Extensions:
+  Commands:
+  $ hg help -c commit > /dev/null
+  $ hg help -e -c commit > /dev/null
+  $ hg help -e commit > /dev/null
+  abort: no such help topic: commit
+  (try "hg help --keyword commit")
+  [255]
+
 Test keyword search help
 
   $ cat > prefixedname.py <<EOF
@@ -967,13 +1068,16 @@
   
   Commands:
   
-   bookmarks create a new bookmark or list existing bookmarks
-   clone     make a copy of an existing repository
-   paths     show aliases for remote repositories
-   update    update working directory (or switch revisions)
+   bookmarks                    create a new bookmark or list existing bookmarks
+   clone                        make a copy of an existing repository
+   debugapplystreamclonebundle  apply a stream clone bundle file
+   debugcreatestreamclonebundle create a stream clone bundle file
+   paths                        show aliases for remote repositories
+   update                       update working directory (or switch revisions)
   
   Extensions:
   
+   clonebundles advertise pre-generated bundles to seed clones (experimental)
    prefixedname matched against word "clone"
    relink       recreates hardlinks between repository clones
   
@@ -1023,7 +1127,7 @@
   > def extsetup(ui):
   >     help.helptable.append((["topic-containing-verbose"],
   >                            "This is the topic to test omit indicating.",
-  >                            lambda : testtopic))
+  >                            lambda ui: testtopic))
   > EOF
   $ echo '[extensions]' >> $HGRCPATH
   $ echo "addverboseitems = `pwd`/addverboseitems.py" >> $HGRCPATH
@@ -1095,8 +1199,7 @@
   
       "default"
           Directory or URL to use when pulling if no source is specified.
-          Default is set to repository from which the current repository was
-          cloned.
+          (default: repository from which the current repository was cloned)
   
       "default-push"
           Optional. Directory or URL to use when pushing if no destination is
@@ -1186,6 +1289,14 @@
         partially merged file. Markers will have two sections, one for each side
         of merge.
   
+      ":merge-local"
+        Like :merge, but resolve all conflicts non-interactively in favor of the
+        local changes.
+  
+      ":merge-other"
+        Like :merge, but resolve all conflicts non-interactively in favor of the
+        other changes.
+  
       ":merge3"
         Uses the internal non-interactive simple merge algorithm for merging
         files. It will fail if there are any conflicts and leave markers in the
@@ -1202,6 +1313,11 @@
       ":tagmerge"
         Uses the internal tag merge algorithm (experimental).
   
+      ":union"
+        Uses the internal non-interactive simple merge algorithm for merging
+        files. It will use both left and right sides for conflict regions. No
+        markers are inserted.
+  
       Internal tools are always available and do not require a GUI but will by
       default not handle symlinks or binary files.
   
@@ -1277,7 +1393,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -1835,7 +1951,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -1995,7 +2111,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -2188,7 +2304,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
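
The internal :merge-local, :merge-other and :union tools documented above can be selected like any other merge tool; a brief sketch with illustrative revision and file names:

  $ hg merge --tool :union other-branch
  $ hg resolve --tool :merge-other -- conflicted.txt
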
--- a/tests/test-hghave.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-hghave.t	Tue Oct 20 15:59:10 2015 -0500
@@ -18,7 +18,7 @@
   >   $ echo foo
   >   foo
   > EOF
-  $ run-tests.py test-hghaveaddon.t
+  $ run-tests.py $HGTEST_RUN_TESTS_PURE test-hghaveaddon.t
   .
   # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
 
--- a/tests/test-hgrc.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-hgrc.t	Tue Oct 20 15:59:10 2015 -0500
@@ -69,7 +69,7 @@
   $ echo '%include $FAKEPATH/no-such-file' > $HGRC
   $ hg version
   Mercurial Distributed SCM (version *) (glob)
-  (see http://mercurial.selenic.com for more information)
+  (see https://mercurial-scm.org for more information)
   
   Copyright (C) 2005-2015 Matt Mackall and others
   This is free software; see the source for copying conditions. There is NO
--- a/tests/test-hgweb-commands.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-hgweb-commands.t	Tue Oct 20 15:59:10 2015 -0500
@@ -705,7 +705,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -841,7 +841,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -991,7 +991,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
   </div>
   <ul>
@@ -1269,7 +1269,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -1335,9 +1335,8 @@
   <div class="overflow">
   <div class="sourcefirst linewraptoggle">line wrap: <a class="linewraplink" href="javascript:toggleLinewrap()">on</a></div>
   <div class="sourcefirst"> line source</div>
-  <pre class="sourcelines stripes4 wrap">
+  <pre class="sourcelines stripes4 wrap bottomline">
   <span id="l1">foo</span><a href="#l1"></a></pre>
-  <div class="sourcelast"></div>
   </div>
   </div>
   </div>
@@ -1396,7 +1395,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -1462,9 +1461,8 @@
   <div class="overflow">
   <div class="sourcefirst linewraptoggle">line wrap: <a class="linewraplink" href="javascript:toggleLinewrap()">on</a></div>
   <div class="sourcefirst"> line source</div>
-  <pre class="sourcelines stripes4 wrap">
+  <pre class="sourcelines stripes4 wrap bottomline">
   <span id="l1">another</span><a href="#l1"></a></pre>
-  <div class="sourcelast"></div>
   </div>
   </div>
   </div>
@@ -1516,7 +1514,7 @@
   <body>
   
   <div class="page_header">
-  <a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a>
+  <a href="https://mercurial-scm.org/" title="Mercurial" style="float: right;">Mercurial</a>
   <a href="/">Mercurial</a>  / summary
   <form action="/log">
   <input type="hidden" name="style" value="gitweb" />
@@ -1613,7 +1611,7 @@
   
   <tr class="parity0">
   <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td>
-  <td><a class="list" href="/rev/2ef0ac749a14?style=gitweb"><b>1.0</b></a></td>
+  <td><a class="list" href="/rev/1.0?style=gitweb"><b>1.0</b></a></td>
   <td class="link">
   <a href="/rev/2ef0ac749a14?style=gitweb">changeset</a> |
   <a href="/log/2ef0ac749a14?style=gitweb">changelog</a> |
@@ -1628,7 +1626,7 @@
   
   <tr class="parity0">
   <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td>
-  <td><a class="list" href="/rev/2ef0ac749a14?style=gitweb"><b>anotherthing</b></a></td>
+  <td><a class="list" href="/rev/anotherthing?style=gitweb"><b>anotherthing</b></a></td>
   <td class="link">
   <a href="/rev/2ef0ac749a14?style=gitweb">changeset</a> |
   <a href="/log/2ef0ac749a14?style=gitweb">changelog</a> |
@@ -1637,7 +1635,7 @@
   </tr>
   <tr class="parity1">
   <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td>
-  <td><a class="list" href="/rev/cad8025a2e87?style=gitweb"><b>something</b></a></td>
+  <td><a class="list" href="/rev/something?style=gitweb"><b>something</b></a></td>
   <td class="link">
   <a href="/rev/cad8025a2e87?style=gitweb">changeset</a> |
   <a href="/log/cad8025a2e87?style=gitweb">changelog</a> |
@@ -1652,8 +1650,7 @@
   
   <tr class="parity0">
   <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td>
-  <td><a class="list" href="/shortlog/cad8025a2e87?style=gitweb"><b>cad8025a2e87</b></a></td>
-  <td class="">unstable</td>
+  <td class="open"><a class="list" href="/shortlog/unstable?style=gitweb"><b>unstable</b></a></td>
   <td class="link">
   <a href="/changeset/cad8025a2e87?style=gitweb">changeset</a> |
   <a href="/log/cad8025a2e87?style=gitweb">changelog</a> |
@@ -1662,8 +1659,7 @@
   </tr>
   <tr class="parity1">
   <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td>
-  <td><a class="list" href="/shortlog/1d22e65f027e?style=gitweb"><b>1d22e65f027e</b></a></td>
-  <td class="">stable</td>
+  <td class="inactive"><a class="list" href="/shortlog/stable?style=gitweb"><b>stable</b></a></td>
   <td class="link">
   <a href="/changeset/1d22e65f027e?style=gitweb">changeset</a> |
   <a href="/log/1d22e65f027e?style=gitweb">changelog</a> |
@@ -1672,8 +1668,7 @@
   </tr>
   <tr class="parity0">
   <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td>
-  <td><a class="list" href="/shortlog/a4f92ed23982?style=gitweb"><b>a4f92ed23982</b></a></td>
-  <td class="">default</td>
+  <td class="inactive"><a class="list" href="/shortlog/default?style=gitweb"><b>default</b></a></td>
   <td class="link">
   <a href="/changeset/a4f92ed23982?style=gitweb">changeset</a> |
   <a href="/log/a4f92ed23982?style=gitweb">changelog</a> |
@@ -1719,7 +1714,7 @@
   <body>
   
   <div class="page_header">
-  <a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a>
+  <a href="https://mercurial-scm.org/" title="Mercurial" style="float: right;">Mercurial</a>
   <a href="/">Mercurial</a>  / graph
   </div>
   
@@ -1731,7 +1726,7 @@
   </form>
   <div class="page_nav">
   <a href="/summary?style=gitweb">summary</a> |
-  <a href="/shortlog?style=gitweb">shortlog</a> |
+  <a href="/shortlog/tip?style=gitweb">shortlog</a> |
   <a href="/log/tip?style=gitweb">changelog</a> |
   graph |
   <a href="/tags?style=gitweb">tags</a> |
@@ -1949,8 +1944,8 @@
   .age { white-space:nowrap; }
   .date { white-space:nowrap; }
   .indexlinks { white-space:nowrap; }
-  .parity0 { background-color: #ddd; }
-  .parity1 { background-color: #eee; }
+  .parity0 { background-color: #ddd; color: #000; }
+  .parity1 { background-color: #eee; color: #000; }
   .lineno { width: 60px; color: #aaa; font-size: smaller;
             text-align: right; }
   .plusline { color: green; }
@@ -2052,6 +2047,35 @@
   	top: -1px;
   }
 
+Stop and restart the server at the directory different from the repository
+root. Even in such case, file patterns should be resolved relative to the
+repository root. (issue4568)
+
+  $ killdaemons.py
+  $ hg serve --config server.preferuncompressed=True -n test \
+  > -p $HGPORT -d --pid-file=`pwd`/hg.pid -E `pwd`/errors.log \
+  > --cwd .. -R `pwd`
+  $ cat hg.pid >> $DAEMON_PIDS
+
+  $ get-with-headers.py 127.0.0.1:$HGPORT 'log?rev=adds("foo")&style=raw'
+  200 Script output follows
+  
+  
+  # HG changesets search
+  # Node ID cad8025a2e87f88c06259790adfa15acb4080123
+  # Query "adds("foo")"
+  # Mode revset expression search
+  
+  changeset:   2ef0ac749a14e4f57a5a822464a0902c6f7f448f
+  revision:    0
+  user:        test
+  date:        Thu, 01 Jan 1970 00:00:00 +0000
+  summary:     base
+  tag:         1.0
+  bookmark:    anotherthing
+  
+  
+
 Stop and restart with HGENCODING=cp932 and preferuncompressed
 
   $ killdaemons.py
--- a/tests/test-hgweb-descend-empties.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-hgweb-descend-empties.t	Tue Oct 20 15:59:10 2015 -0500
@@ -47,7 +47,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -156,7 +156,8 @@
   <head>
   <link rel="icon" href="/static/hgicon.png" type="image/png" />
   <meta name="robots" content="index, nofollow" />
-  <link rel="stylesheet" href="/static/style-coal.css" type="text/css" />
+  <link rel="stylesheet" href="/static/style-paper.css" type="text/css" />
+  <link rel="stylesheet" href="/static/style-extra-coal.css" type="text/css" />
   <script type="text/javascript" src="/static/mercurial.js"></script>
   
   <title>test: c9f45f7a1659 /</title>
@@ -166,7 +167,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -370,14 +371,9 @@
       </div>
   
       <div id="powered-by">
-          <p><a href="http://mercurial.selenic.com/" title="Mercurial"><img src="/static/hglogo.png" width=75 height=90 border=0 alt="mercurial" /></a></p>
+          <p><a href="https://mercurial-scm.org/" title="Mercurial"><img src="/static/hglogo.png" width=75 height=90 border=0 alt="mercurial" /></a></p>
       </div>
   
-      <div id="corner-top-left"></div>
-      <div id="corner-top-right"></div>
-      <div id="corner-bottom-left"></div>
-      <div id="corner-bottom-right"></div>
-  
   </div>
   
   </body>
@@ -407,7 +403,7 @@
   <body>
   
   <div class="page_header">
-  <a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a>
+  <a href="https://mercurial-scm.org/" title="Mercurial" style="float: right;">Mercurial</a>
   <a href="/">Mercurial</a>  / files
   </div>
   
@@ -558,7 +554,7 @@
   <script type="text/javascript">process_dates()</script>
   
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
   </div>
   
--- a/tests/test-hgweb-diffs.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-hgweb-diffs.t	Tue Oct 20 15:59:10 2015 -0500
@@ -53,7 +53,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -222,7 +222,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -331,7 +331,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -504,7 +504,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -610,7 +610,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -740,7 +740,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -872,7 +872,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -1010,7 +1010,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
--- a/tests/test-hgweb-empty.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-hgweb-empty.t	Tue Oct 20 15:59:10 2015 -0500
@@ -28,7 +28,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -139,7 +139,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -249,7 +249,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -398,7 +398,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
--- a/tests/test-hgweb-filelog.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-hgweb-filelog.t	Tue Oct 20 15:59:10 2015 -0500
@@ -153,7 +153,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -270,7 +270,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -387,7 +387,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -496,7 +496,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -601,7 +601,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" width=75 height=90 border=0 alt="mercurial" /></a>
   </div>
   <ul>
@@ -748,7 +748,7 @@
   <script type="text/javascript">process_dates()</script>
   
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
   </div>
   
--- a/tests/test-hgweb-non-interactive.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-hgweb-non-interactive.t	Tue Oct 20 15:59:10 2015 -0500
@@ -58,13 +58,15 @@
   > }
   > 
   > i = hgweb('.')
-  > i(env, startrsp)
+  > for c in i(env, startrsp):
+  >     pass
   > print '---- ERRORS'
   > print errors.getvalue()
   > print '---- OS.ENVIRON wsgi variables'
   > print sorted([x for x in os.environ if x.startswith('wsgi')])
   > print '---- request.ENVIRON wsgi variables'
-  > print sorted([x for x in i.repo.ui.environ if x.startswith('wsgi')])
+  > with i._obtainrepo() as repo:
+  >     print sorted([x for x in repo.ui.environ if x.startswith('wsgi')])
   > EOF
   $ python request.py
   ---- STATUS
--- a/tests/test-hgweb-removed.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-hgweb-removed.t	Tue Oct 20 15:59:10 2015 -0500
@@ -34,7 +34,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -158,7 +158,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
--- a/tests/test-hgweb-symrev.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-hgweb-symrev.t	Tue Oct 20 15:59:10 2015 -0500
@@ -376,8 +376,8 @@
    annotate foo @ 1:<a href="/rev/a7c1559b7bba?style=coal">a7c1559b7bba</a>
    <td class="author"><a href="/file/43c799df6e75/foo?style=coal">43c799df6e75</a> </td>
    <td class="author"><a href="/file/9d8c40cba617/foo?style=coal">9d8c40cba617</a> </td>
-  <a href="/annotate/43c799df6e75/foo?style=coal#1"
-  <a href="/annotate/a7c1559b7bba/foo?style=coal#2"
+  <a href="/annotate/43c799df6e75/foo?style=coal#l1"
+  <a href="/annotate/a7c1559b7bba/foo?style=coal#l2"
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'diff/xyzzy/foo?style=coal' | egrep $REVLINKS
   <li><a href="/shortlog/xyzzy?style=coal">log</a></li>
@@ -422,17 +422,18 @@
   <a class="list" href="/rev/43c799df6e75?style=gitweb">
   <a href="/rev/43c799df6e75?style=gitweb">changeset</a> |
   <a href="/file/43c799df6e75?style=gitweb">files</a>
-  <td><a class="list" href="/rev/a7c1559b7bba?style=gitweb"><b>xyzzy</b></a></td>
+  <td><a class="list" href="/rev/xyzzy?style=gitweb"><b>xyzzy</b></a></td>
   <a href="/rev/a7c1559b7bba?style=gitweb">changeset</a> |
   <a href="/log/a7c1559b7bba?style=gitweb">changelog</a> |
   <a href="/file/a7c1559b7bba?style=gitweb">files</a>
-  <td><a class="list" href="/shortlog/9d8c40cba617?style=gitweb"><b>9d8c40cba617</b></a></td>
+  <td class="open"><a class="list" href="/shortlog/default?style=gitweb"><b>default</b></a></td>
   <a href="/changeset/9d8c40cba617?style=gitweb">changeset</a> |
   <a href="/log/9d8c40cba617?style=gitweb">changelog</a> |
   <a href="/file/9d8c40cba617?style=gitweb">files</a>
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'shortlog?style=gitweb' | egrep $REVLINKS
   <a href="/log/tip?style=gitweb">changelog</a> |
+  <a href="/graph/tip?style=gitweb">graph</a> |
   <a href="/file/tip?style=gitweb">files</a> | <a href="/archive/tip.zip">zip</a>  |
   <br/><a href="/shortlog/43c799df6e75?style=gitweb">(0)</a> <a href="/shortlog/tip?style=gitweb">tip</a> <br/>
   <a class="list" href="/rev/9d8c40cba617?style=gitweb">
@@ -448,6 +449,7 @@
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'log?style=gitweb' | egrep $REVLINKS
   <a href="/shortlog/tip?style=gitweb">shortlog</a> |
+  <a href="/graph/tip?style=gitweb">graph</a> |
   <a href="/file/tip?style=gitweb">files</a> | <a href="/archive/tip.zip">zip</a>  |
   <a href="/log/43c799df6e75?style=gitweb">(0)</a>  <a href="/log/tip?style=gitweb">tip</a> <br/>
   <a class="title" href="/rev/9d8c40cba617?style=gitweb"><span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span>third<span class="logtags"> <span class="branchtag" title="default">default</span> <span class="tagtag" title="tip">tip</span> </span></a>
@@ -459,6 +461,7 @@
   <a href="/log/43c799df6e75?style=gitweb">(0)</a>  <a href="/log/tip?style=gitweb">tip</a> <br/>
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'graph?style=gitweb' | egrep $REVLINKS
+  <a href="/shortlog/tip?style=gitweb">shortlog</a> |
   <a href="/log/tip?style=gitweb">changelog</a> |
   <a href="/file/tip?style=gitweb">files</a> |
   <a href="/graph/tip?revcount=30&style=gitweb">less</a>
@@ -469,19 +472,19 @@
   | <a href="/graph/43c799df6e75?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a> 
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'tags?style=gitweb' | egrep $REVLINKS
-  <td><a class="list" href="/rev/9d8c40cba617?style=gitweb"><b>tip</b></a></td>
+  <td><a class="list" href="/rev/tip?style=gitweb"><b>tip</b></a></td>
   <a href="/rev/9d8c40cba617?style=gitweb">changeset</a> |
   <a href="/log/9d8c40cba617?style=gitweb">changelog</a> |
   <a href="/file/9d8c40cba617?style=gitweb">files</a>
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'bookmarks?style=gitweb' | egrep $REVLINKS
-  <td><a class="list" href="/rev/a7c1559b7bba?style=gitweb"><b>xyzzy</b></a></td>
+  <td><a class="list" href="/rev/xyzzy?style=gitweb"><b>xyzzy</b></a></td>
   <a href="/rev/a7c1559b7bba?style=gitweb">changeset</a> |
   <a href="/log/a7c1559b7bba?style=gitweb">changelog</a> |
   <a href="/file/a7c1559b7bba?style=gitweb">files</a>
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'branches?style=gitweb' | egrep $REVLINKS
-  <td><a class="list" href="/shortlog/9d8c40cba617?style=gitweb"><b>9d8c40cba617</b></a></td>
+  <td class="open"><a class="list" href="/shortlog/default?style=gitweb"><b>default</b></a></td>
   <a href="/changeset/9d8c40cba617?style=gitweb">changeset</a> |
   <a href="/log/9d8c40cba617?style=gitweb">changelog</a> |
   <a href="/file/9d8c40cba617?style=gitweb">files</a>
@@ -509,6 +512,7 @@
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'rev/xyzzy?style=gitweb' | egrep $REVLINKS
   <a href="/shortlog/xyzzy?style=gitweb">shortlog</a> |
   <a href="/log/xyzzy?style=gitweb">changelog</a> |
+  <a href="/graph/xyzzy?style=gitweb">graph</a> |
   <a href="/file/xyzzy?style=gitweb">files</a> |
   <a href="/raw-rev/xyzzy">raw</a>  | <a href="/archive/xyzzy.zip">zip</a>  |
   <a class="title" href="/raw-rev/a7c1559b7bba">second <span class="logtags"><span class="bookmarktag" title="xyzzy">xyzzy</span> </span></a>
@@ -524,6 +528,7 @@
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'shortlog/xyzzy?style=gitweb' | egrep $REVLINKS
   <a href="/log/xyzzy?style=gitweb">changelog</a> |
+  <a href="/graph/xyzzy?style=gitweb">graph</a> |
   <a href="/file/xyzzy?style=gitweb">files</a> | <a href="/archive/xyzzy.zip">zip</a>  |
   <br/><a href="/shortlog/43c799df6e75?style=gitweb">(0)</a> <a href="/shortlog/tip?style=gitweb">tip</a> <br/>
   <a class="list" href="/rev/a7c1559b7bba?style=gitweb">
@@ -536,6 +541,7 @@
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'log/xyzzy?style=gitweb' | egrep $REVLINKS
   <a href="/shortlog/xyzzy?style=gitweb">shortlog</a> |
+  <a href="/graph/xyzzy?style=gitweb">graph</a> |
   <a href="/file/xyzzy?style=gitweb">files</a> | <a href="/archive/xyzzy.zip">zip</a>  |
   <a href="/log/43c799df6e75?style=gitweb">(0)</a>  <a href="/log/tip?style=gitweb">tip</a> <br/>
   <a class="title" href="/rev/a7c1559b7bba?style=gitweb"><span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span>second<span class="logtags"> <span class="bookmarktag" title="xyzzy">xyzzy</span> </span></a>
@@ -545,6 +551,7 @@
   <a href="/log/43c799df6e75?style=gitweb">(0)</a>  <a href="/log/tip?style=gitweb">tip</a> <br/>
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'graph/xyzzy?style=gitweb' | egrep $REVLINKS
+  <a href="/shortlog/xyzzy?style=gitweb">shortlog</a> |
   <a href="/log/xyzzy?style=gitweb">changelog</a> |
   <a href="/file/xyzzy?style=gitweb">files</a> |
   <a href="/graph/xyzzy?revcount=30&style=gitweb">less</a>
@@ -639,6 +646,7 @@
 (De)referencing symbolic revisions (monoblue)
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'summary?style=monoblue' | egrep $REVLINKS
+              <li><a href="/archive/tip.zip">zip</a></li>
   <a href="/rev/9d8c40cba617?style=monoblue">
   <a href="/rev/9d8c40cba617?style=monoblue">changeset</a> |
   <a href="/file/9d8c40cba617?style=monoblue">files</a>
@@ -648,11 +656,11 @@
   <a href="/rev/43c799df6e75?style=monoblue">
   <a href="/rev/43c799df6e75?style=monoblue">changeset</a> |
   <a href="/file/43c799df6e75?style=monoblue">files</a>
-  <td><a href="/rev/a7c1559b7bba?style=monoblue">xyzzy</a></td>
+  <td><a href="/rev/xyzzy?style=monoblue">xyzzy</a></td>
   <a href="/rev/a7c1559b7bba?style=monoblue">changeset</a> |
   <a href="/log/a7c1559b7bba?style=monoblue">changelog</a> |
   <a href="/file/a7c1559b7bba?style=monoblue">files</a>
-  <td><a href="/shortlog/9d8c40cba617?style=monoblue">9d8c40cba617</a></td>
+  <td class="open"><a href="/shortlog/default?style=monoblue">default</a></td>
   <a href="/rev/9d8c40cba617?style=monoblue">changeset</a> |
   <a href="/log/9d8c40cba617?style=monoblue">changelog</a> |
   <a href="/file/9d8c40cba617?style=monoblue">files</a>
@@ -688,19 +696,19 @@
           | <a href="/graph/43c799df6e75?style=monoblue">(0)</a> <a href="/graph/tip?style=monoblue">tip</a> 
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'tags?style=monoblue' | egrep $REVLINKS
-  <td><a href="/rev/9d8c40cba617?style=monoblue">tip</a></td>
+  <td><a href="/rev/tip?style=monoblue">tip</a></td>
   <a href="/rev/9d8c40cba617?style=monoblue">changeset</a> |
   <a href="/log/9d8c40cba617?style=monoblue">changelog</a> |
   <a href="/file/9d8c40cba617?style=monoblue">files</a>
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'bookmarks?style=monoblue' | egrep $REVLINKS
-  <td><a href="/rev/a7c1559b7bba?style=monoblue">xyzzy</a></td>
+  <td><a href="/rev/xyzzy?style=monoblue">xyzzy</a></td>
   <a href="/rev/a7c1559b7bba?style=monoblue">changeset</a> |
   <a href="/log/a7c1559b7bba?style=monoblue">changelog</a> |
   <a href="/file/a7c1559b7bba?style=monoblue">files</a>
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'branches?style=monoblue' | egrep $REVLINKS
-  <td><a href="/shortlog/9d8c40cba617?style=monoblue">9d8c40cba617</a></td>
+  <td class="open"><a href="/shortlog/default?style=monoblue">default</a></td>
   <a href="/rev/9d8c40cba617?style=monoblue">changeset</a> |
   <a href="/log/9d8c40cba617?style=monoblue">changelog</a> |
   <a href="/file/9d8c40cba617?style=monoblue">files</a>
@@ -782,6 +790,7 @@
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'file/xyzzy/foo?style=monoblue' | egrep $REVLINKS
               <li><a href="/graph/xyzzy?style=monoblue">graph</a></li>
               <li><a href="/file/xyzzy/?style=monoblue">files</a></li>
+          <li><a href="/file/tip/foo?style=monoblue">latest</a></li>
           <li><a href="/log/xyzzy/foo?style=monoblue">revisions</a></li>
           <li><a href="/annotate/xyzzy/foo?style=monoblue">annotate</a></li>
           <li><a href="/diff/xyzzy/foo?style=monoblue">diff</a></li>
@@ -800,15 +809,20 @@
           <li><a href="/comparison/xyzzy/foo?style=monoblue">comparison</a></li>
           <li><a href="/rss-log/tip/foo">rss</a></li>
   <a href="/rev/a7c1559b7bba?style=monoblue">
-  <a href="/file/a7c1559b7bba/foo?style=monoblue">file</a>&nbsp;|&nbsp;<a href="/diff/a7c1559b7bba/foo?style=monoblue">diff</a>&nbsp;|&nbsp;<a href="/annotate/a7c1559b7bba/foo?style=monoblue">annotate</a>
+  <a href="/file/a7c1559b7bba/foo?style=monoblue">file</a> |
+  <a href="/diff/a7c1559b7bba/foo?style=monoblue">diff</a> |
+  <a href="/annotate/a7c1559b7bba/foo?style=monoblue">annotate</a>
   <a href="/rev/43c799df6e75?style=monoblue">
-  <a href="/file/43c799df6e75/foo?style=monoblue">file</a>&nbsp;|&nbsp;<a href="/diff/43c799df6e75/foo?style=monoblue">diff</a>&nbsp;|&nbsp;<a href="/annotate/43c799df6e75/foo?style=monoblue">annotate</a>
+  <a href="/file/43c799df6e75/foo?style=monoblue">file</a> |
+  <a href="/diff/43c799df6e75/foo?style=monoblue">diff</a> |
+  <a href="/annotate/43c799df6e75/foo?style=monoblue">annotate</a>
       <a href="/log/43c799df6e75/foo?style=monoblue">(0)</a><a href="/log/tip/foo?style=monoblue">tip</a>
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'annotate/xyzzy/foo?style=monoblue' | egrep $REVLINKS
               <li><a href="/graph/xyzzy?style=monoblue">graph</a></li>
               <li><a href="/file/xyzzy/?style=monoblue">files</a></li>
           <li><a href="/file/xyzzy/foo?style=monoblue">file</a></li>
+          <li><a href="/file/tip/foo?style=monoblue">latest</a></li>
           <li><a href="/log/xyzzy/foo?style=monoblue">revisions</a></li>
           <li><a href="/diff/xyzzy/foo?style=monoblue">diff</a></li>
           <li><a href="/comparison/xyzzy/foo?style=monoblue">comparison</a></li>
@@ -823,6 +837,7 @@
               <li><a href="/graph/xyzzy?style=monoblue">graph</a></li>
               <li><a href="/file/xyzzy?style=monoblue">files</a></li>
           <li><a href="/file/xyzzy/foo?style=monoblue">file</a></li>
+          <li><a href="/file/tip/foo?style=monoblue">latest</a></li>
           <li><a href="/log/xyzzy/foo?style=monoblue">revisions</a></li>
           <li><a href="/annotate/xyzzy/foo?style=monoblue">annotate</a></li>
           <li><a href="/comparison/xyzzy/foo?style=monoblue">comparison</a></li>
@@ -835,6 +850,7 @@
               <li><a href="/graph/xyzzy?style=monoblue">graph</a></li>
               <li><a href="/file/xyzzy?style=monoblue">files</a></li>
           <li><a href="/file/xyzzy/foo?style=monoblue">file</a></li>
+          <li><a href="/file/tip/foo?style=monoblue">latest</a></li>
           <li><a href="/log/xyzzy/foo?style=monoblue">revisions</a></li>
           <li><a href="/annotate/xyzzy/foo?style=monoblue">annotate</a></li>
           <li><a href="/diff/xyzzy/foo?style=monoblue">diff</a></li>
--- a/tests/test-hgweb.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-hgweb.t	Tue Oct 20 15:59:10 2015 -0500
@@ -61,7 +61,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" width=75 height=90 border=0 alt="mercurial" /></a>
   </div>
   <ul>
@@ -169,7 +169,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" width=75 height=90 border=0 alt="mercurial" /></a>
   </div>
   <ul>
@@ -241,7 +241,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -340,10 +340,10 @@
 
   $ get-with-headers.py --twice localhost:$HGPORT 'static/style-gitweb.css' - date etag server
   200 Script output follows
-  content-length: 5372
+  content-length: 6521
   content-type: text/css
   
-  body { font-family: sans-serif; font-size: 12px; border:solid #d9d8d1; border-width:1px; margin:10px; }
+  body { font-family: sans-serif; font-size: 12px; border:solid #d9d8d1; border-width:1px; margin:10px; background: white; color: black; }
   a { color:#0000cc; }
   a:hover, a:visited, a:active { color:#880000; }
   div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; }
@@ -374,9 +374,12 @@
   a.list:hover { text-decoration:underline; color:#880000; }
   table { padding:8px 4px; }
   th { padding:2px 5px; font-size:12px; text-align:left; }
-  tr.light:hover, .parity0:hover { background-color:#edece6; }
-  tr.dark, .parity1 { background-color:#f6f6f0; }
-  tr.dark:hover, .parity1:hover { background-color:#edece6; }
+  tr.dark, .parity1, pre.sourcelines.stripes > :nth-child(4n+4) { background-color:#f6f6f0; }
+  tr.light:hover, .parity0:hover, tr.dark:hover, .parity1:hover,
+  pre.sourcelines.stripes > :nth-child(4n+2):hover,
+  pre.sourcelines.stripes > :nth-child(4n+4):hover,
+  pre.sourcelines.stripes > :nth-child(4n+1):hover + :nth-child(4n+2),
+  pre.sourcelines.stripes > :nth-child(4n+3):hover + :nth-child(4n+4) { background-color:#edece6; }
   td { padding:2px 5px; font-size:12px; vertical-align:top; }
   td.closed { background-color: #99f; }
   td.link { padding:2px 5px; font-family:sans-serif; font-size:10px; }
@@ -432,6 +435,43 @@
   span.difflineplus { color:#008800; }
   span.difflineminus { color:#cc0000; }
   span.difflineat { color:#990099; }
+  div.diffblocks { counter-reset: lineno; }
+  div.diffblock { counter-increment: lineno; }
+  pre.sourcelines { position: relative; counter-reset: lineno; }
+  pre.sourcelines > span {
+  	display: inline-block;
+  	box-sizing: border-box;
+  	width: 100%;
+  	padding: 0 0 0 5em;
+  	counter-increment: lineno;
+  	vertical-align: top;
+  }
+  pre.sourcelines > span:before {
+  	-moz-user-select: -moz-none;
+  	-khtml-user-select: none;
+  	-webkit-user-select: none;
+  	-ms-user-select: none;
+  	user-select: none;
+  	display: inline-block;
+  	margin-left: -5em;
+  	width: 4em;
+  	color: #999;
+  	text-align: right;
+  	content: counters(lineno,".");
+  	float: left;
+  }
+  pre.sourcelines > a {
+  	display: inline-block;
+  	position: absolute;
+  	left: 0px;
+  	width: 4em;
+  	height: 1em;
+  }
+  tr:target td,
+  pre.sourcelines > span:target,
+  pre.sourcelines.stripes > span:target {
+  	background-color: #bfdfff;
+  }
   
   /* Graph */
   div#wrapper {
@@ -539,6 +579,10 @@
   .scroll-loading-error {
       background-color: #FFCCCC !important;
   }
+  
+  #doc {
+      margin: 0 8px;
+  }
   304 Not Modified
   
 
--- a/tests/test-hgwebdir.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-hgwebdir.t	Tue Oct 20 15:59:10 2015 -0500
@@ -201,7 +201,7 @@
   
   <div class="container">
   <div class="menu">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" width=75 height=90 border=0 alt="mercurial" /></a>
   </div>
   <div class="main">
@@ -701,7 +701,7 @@
   
   <div class="container">
   <div class="menu">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" width=75 height=90 border=0 alt="mercurial" /></a>
   </div>
   <div class="main">
@@ -1152,7 +1152,7 @@
   
   <div class="container">
   <div class="menu">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" width=75 height=90 border=0 alt="mercurial" /></a>
   </div>
   <div class="main">
@@ -1245,6 +1245,67 @@
   $ get-with-headers.py localhost:$HGPORT2 'a/rss-log' | grep '<guid'
       <guid isPermaLink="true">http://hg.example.com:8080/foo/a/rev/8580ff50825a</guid>
 
+Path refreshing works as expected
+
+  $ killdaemons.py
+  $ mkdir $root/refreshtest
+  $ hg init $root/refreshtest/a
+  $ cat > paths.conf << EOF
+  > [paths]
+  > / = $root/refreshtest/*
+  > EOF
+  $ hg serve -p $HGPORT1 -d --pid-file hg.pid --webdir-conf paths.conf
+  $ cat hg.pid >> $DAEMON_PIDS
+
+  $ get-with-headers.py localhost:$HGPORT1 '?style=raw'
+  200 Script output follows
+  
+  
+  /a/
+  
+
+By default refreshing occurs every 20s and a new repo won't be listed
+immediately.
+
+  $ hg init $root/refreshtest/b
+  $ get-with-headers.py localhost:$HGPORT1 '?style=raw'
+  200 Script output follows
+  
+  
+  /a/
+  
+
+Restart the server with no refresh interval. New repo should appear
+immediately.
+
+  $ killdaemons.py
+  $ cat > paths.conf << EOF
+  > [web]
+  > refreshinterval = -1
+  > [paths]
+  > / = $root/refreshtest/*
+  > EOF
+  $ hg serve -p $HGPORT1 -d --pid-file hg.pid --webdir-conf paths.conf
+  $ cat hg.pid >> $DAEMON_PIDS
+
+  $ get-with-headers.py localhost:$HGPORT1 '?style=raw'
+  200 Script output follows
+  
+  
+  /a/
+  /b/
+  
+
+  $ hg init $root/refreshtest/c
+  $ get-with-headers.py localhost:$HGPORT1 '?style=raw'
+  200 Script output follows
+  
+  
+  /a/
+  /b/
+  /c/
+  
+
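The block above exercises hgwebdir's repository-list caching: by default the set of served repositories is rescanned only every 20 seconds, and setting web.refreshinterval to a negative value forces a rescan on every request, which is why /b/ and /c/ show up immediately in the second half of the test. As a rough illustration of the idea only (not hgwebdir's actual code), a time-based cache along these lines behaves the same way:

  # Sketch only: a minimal time-based repository-list cache.  A negative
  # refreshinterval means "rescan on every request"; otherwise the cached
  # listing is reused until it is older than the interval.
  import os
  import time

  class repolister(object):
      def __init__(self, root, refreshinterval=20.0):
          self.root = root
          self.refreshinterval = refreshinterval
          self._repos = None
          self._lastrefresh = 0.0

      def repos(self):
          now = time.time()
          stale = (self._repos is None
                   or self.refreshinterval < 0
                   or now - self._lastrefresh > self.refreshinterval)
          if stale:
              self._repos = sorted(
                  name for name in os.listdir(self.root)
                  if os.path.isdir(os.path.join(self.root, name, '.hg')))
              self._lastrefresh = now
          return self._repos
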
 paths errors 1
 
   $ cat error-paths-1.log
--- a/tests/test-highlight.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-highlight.t	Tue Oct 20 15:59:10 2015 -0500
@@ -5,6 +5,7 @@
   > highlight =
   > [web]
   > pygments_style = friendly
+  > highlightfiles = **.py and size('<100KB')
   > EOF
   $ hg init test
   $ cd test
@@ -76,7 +77,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -142,7 +143,7 @@
   <div class="overflow">
   <div class="sourcefirst linewraptoggle">line wrap: <a class="linewraplink" href="javascript:toggleLinewrap()">on</a></div>
   <div class="sourcefirst"> line source</div>
-  <pre class="sourcelines stripes4 wrap">
+  <pre class="sourcelines stripes4 wrap bottomline">
   <span id="l1"><span class="c">#!/usr/bin/env python</span></span><a href="#l1"></a>
   <span id="l2"></span><a href="#l2"></a>
   <span id="l3"><span class="sd">&quot;&quot;&quot;Fun with generators. Corresponding Haskell implementation:</span></span><a href="#l3"></a>
@@ -176,7 +177,6 @@
   <span id="l31">    <span class="n">p</span> <span class="o">=</span> <span class="n">primes</span><span class="p">()</span></span><a href="#l31"></a>
   <span id="l32">    <span class="kn">print</span> <span class="s">&quot;The first </span><span class="si">%d</span><span class="s"> primes: </span><span class="si">%s</span><span class="s">&quot;</span> <span class="o">%</span> <span class="p">(</span><span class="n">n</span><span class="p">,</span> <span class="nb">list</span><span class="p">(</span><span class="n">islice</span><span class="p">(</span><span class="n">p</span><span class="p">,</span> <span class="n">n</span><span class="p">)))</span></span><a href="#l32"></a>
   <span id="l33"></span><a href="#l33"></a></pre>
-  <div class="sourcelast"></div>
   </div>
   </div>
   </div>
@@ -210,7 +210,7 @@
   <div class="container">
   <div class="menu">
   <div class="logo">
-  <a href="http://mercurial.selenic.com/">
+  <a href="https://mercurial-scm.org/">
   <img src="/static/hglogo.png" alt="mercurial" /></a>
   </div>
   <ul>
@@ -591,6 +591,28 @@
 errors encountered
 
   $ cat errors.log
+  $ killdaemons.py
+
+only highlight C source files
+
+  $ cat > .hg/hgrc <<EOF
+  > [web]
+  > highlightfiles = **.c
+  > EOF
+
+hg serve again
+
+  $ hg serve -p $HGPORT -d -n test --pid-file=hg.pid -A access.log -E errors.log
+  $ cat hg.pid >> $DAEMON_PIDS
+
+test that fileset in highlightfiles works and primes.py is not highlighted
+
+  $ get-with-headers.py localhost:$HGPORT 'file/tip/primes.py' | grep 'id="l11"'
+  <span id="l11">def primes():</span><a href="#l11"></a>
+
+errors encountered
+
+  $ cat errors.log
   $ cd ..
   $ hg init eucjp
   $ cd eucjp
@@ -622,4 +644,43 @@
   % hgweb filerevision, html
   % errors encountered
 
+We attempt to highlight unknown files by default
+
+  $ killdaemons.py
+
+  $ cat > .hg/hgrc << EOF
+  > [web]
+  > highlightfiles = **
+  > EOF
+
+  $ cat > unknownfile << EOF
+  > #!/usr/bin/python
+  > def foo():
+  >    pass
+  > EOF
+
+  $ hg add unknownfile
+  $ hg commit -m unknown unknownfile
+
+  $ hg serve -p $HGPORT -d -n test --pid-file=hg.pid
+  $ cat hg.pid >> $DAEMON_PIDS
+
+  $ get-with-headers.py localhost:$HGPORT 'file/tip/unknownfile' | grep l2
+  <span id="l2"><span class="k">def</span> <span class="nf">foo</span><span class="p">():</span></span><a href="#l2"></a>
+
+We can prevent Pygments from falling back to a non-filename-based
+detection mode
+
+  $ cat > .hg/hgrc << EOF
+  > [web]
+  > highlightfiles = **
+  > highlightonlymatchfilename = true
+  > EOF
+
+  $ killdaemons.py
+  $ hg serve -p $HGPORT -d -n test --pid-file=hg.pid
+  $ cat hg.pid >> $DAEMON_PIDS
+  $ get-with-headers.py localhost:$HGPORT 'file/tip/unknownfile' | grep l2
+  <span id="l2">def foo():</span><a href="#l2"></a>
+
   $ cd ..
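The two additions above hinge on how the highlight extension picks a lexer: web.highlightfiles is a fileset limiting which files are pygmentized at all, and web.highlightonlymatchfilename = true stops Pygments from guessing a lexer from the file contents when the filename doesn't match a known type, which is why the last grep shows an unhighlighted "def foo():". A rough sketch of that filename-first selection using the public Pygments API (not the extension's actual code):

  # Sketch only: choose a lexer by filename first, optionally falling back
  # to Pygments' content-based guesser.  With filename_only=True an unknown
  # filename yields plain text, as in the test output above.
  from pygments.lexers import TextLexer, get_lexer_for_filename, guess_lexer
  from pygments.util import ClassNotFound

  def pick_lexer(filename, text, filename_only=False):
      try:
          return get_lexer_for_filename(filename, text)
      except ClassNotFound:
          if filename_only:
              return TextLexer()
          try:
              return guess_lexer(text)
          except ClassNotFound:
              return TextLexer()

With the shebang line present, guess_lexer would still pick a Python lexer for "unknownfile"; disabling the fallback is what removes the highlighting in the second run.
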
--- a/tests/test-histedit-arguments.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-histedit-arguments.t	Tue Oct 20 15:59:10 2015 -0500
@@ -69,7 +69,7 @@
   #  f, fold = use commit, but combine it with the one above
   #  r, roll = like fold, but discard this commit's description
   #  d, drop = remove commit from history
-  #  m, mess = edit message without changing commit content
+  #  m, mess = edit commit message without changing commit content
   #
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
@@ -253,6 +253,7 @@
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   saved backup bundle to $TESTTMP/foo/.hg/strip-backup/*-backup.hg (glob)
+  saved backup bundle to $TESTTMP/foo/.hg/strip-backup/c8e68270e35a-23a13bf9-backup.hg (glob)
 
   $ hg update -q 2
   $ echo x > x
@@ -292,7 +293,7 @@
   #  f, fold = use commit, but combine it with the one above
   #  r, roll = like fold, but discard this commit's description
   #  d, drop = remove commit from history
-  #  m, mess = edit message without changing commit content
+  #  m, mess = edit commit message without changing commit content
   #
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
@@ -324,3 +325,25 @@
   |
   o  0:6058cbb6cfd7 one
   
+
+Test that abort fails gracefully on exception
+----------------------------------------------
+  $ hg histedit . -q --commands - << EOF
+  > edit 8fda0c726bf2 6 x
+  > EOF
+  Make changes as needed, you may commit or record as needed now.
+  When you are finished, run hg histedit --continue to resume.
+  [1]
+Corrupt histedit state file
+  $ sed 's/8fda0c726bf2/123456789012/' .hg/histedit-state > ../corrupt-histedit
+  $ mv ../corrupt-histedit .hg/histedit-state
+  $ hg histedit --abort
+  warning: encountered an exception during histedit --abort; the repository may not have been completely cleaned up
+  abort: No such file or directory: * (glob)
+  [255]
+Histedit state has been exited
+  $ hg summary -q
+  parent: 5:63379946892c 
+  commit: 1 added, 1 unknown (new branch head)
+  update: 4 new changesets (update)
+
--- a/tests/test-histedit-bookmark-motion.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-histedit-bookmark-motion.t	Tue Oct 20 15:59:10 2015 -0500
@@ -75,7 +75,7 @@
   #  f, fold = use commit, but combine it with the one above
   #  r, roll = like fold, but discard this commit's description
   #  d, drop = remove commit from history
-  #  m, mess = edit message without changing commit content
+  #  m, mess = edit commit message without changing commit content
   #
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg histedit 1 --commands - --verbose << EOF | grep histedit
@@ -85,6 +85,7 @@
   > fold e860deea161a 4 e
   > pick 652413bf663e 5 f
   > EOF
+  saved backup bundle to $TESTTMP/r/.hg/strip-backup/96e494a2d553-3c6c5d92-backup.hg (glob)
   histedit: moving bookmarks also-two from 177f92b77385 to b346ab9a313d
   histedit: moving bookmarks five from 652413bf663e to cacdfd884a93
   histedit: moving bookmarks four from e860deea161a to 59d9f330561f
@@ -92,7 +93,6 @@
   histedit: moving bookmarks two from 177f92b77385 to b346ab9a313d
   histedit: moving bookmarks will-move-backwards from d2ae7f538514 to cb9a9f314b8b
   saved backup bundle to $TESTTMP/r/.hg/strip-backup/d2ae7f538514-48787b8d-backup.hg (glob)
-  saved backup bundle to $TESTTMP/r/.hg/strip-backup/96e494a2d553-3c6c5d92-backup.hg (glob)
   $ hg log --graph
   @  changeset:   3:cacdfd884a93
   |  bookmark:    five
@@ -136,7 +136,7 @@
   #  f, fold = use commit, but combine it with the one above
   #  r, roll = like fold, but discard this commit's description
   #  d, drop = remove commit from history
-  #  m, mess = edit message without changing commit content
+  #  m, mess = edit commit message without changing commit content
   #
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg histedit 1 --commands - --verbose << EOF | grep histedit
--- a/tests/test-histedit-commute.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-histedit-commute.t	Tue Oct 20 15:59:10 2015 -0500
@@ -69,7 +69,7 @@
   #  f, fold = use commit, but combine it with the one above
   #  r, roll = like fold, but discard this commit's description
   #  d, drop = remove commit from history
-  #  m, mess = edit message without changing commit content
+  #  m, mess = edit commit message without changing commit content
   #
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
@@ -347,7 +347,7 @@
   #  f, fold = use commit, but combine it with the one above
   #  r, roll = like fold, but discard this commit's description
   #  d, drop = remove commit from history
-  #  m, mess = edit message without changing commit content
+  #  m, mess = edit commit message without changing commit content
   #
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
@@ -424,6 +424,7 @@
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   saved backup bundle to $TESTTMP/issue4251/.hg/strip-backup/*-backup.hg (glob)
+  saved backup bundle to $TESTTMP/issue4251/.hg/strip-backup/b0f4233702ca-d99e7186-backup.hg (glob)
 
   $ hg --config diff.git=yes export 0
   # HG changeset patch
--- a/tests/test-histedit-edit.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-histedit-edit.t	Tue Oct 20 15:59:10 2015 -0500
@@ -298,11 +298,11 @@
 check saving last-message.txt, at first
 
   $ cat > $TESTTMP/commitfailure.py <<EOF
-  > from mercurial import util
+  > from mercurial import error
   > def reposetup(ui, repo):
   >     class commitfailure(repo.__class__):
   >         def commit(self, *args, **kwargs):
-  >             raise util.Abort('emulating unexpected abort')
+  >             raise error.Abort('emulating unexpected abort')
   >     repo.__class__ = commitfailure
   > EOF
   $ cat >> .hg/hgrc <<EOF
--- a/tests/test-histedit-fold-non-commute.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-histedit-fold-non-commute.t	Tue Oct 20 15:59:10 2015 -0500
@@ -88,8 +88,7 @@
   $ hg histedit 3 --commands $EDITED 2>&1 | fixbundle
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   merging e
-  warning: conflicts during merge.
-  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging e! (edit, then use 'hg resolve --mark')
   Fix up the change and run hg histedit --continue
 
 fix up
@@ -123,8 +122,7 @@
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   merging e
-  warning: conflicts during merge.
-  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging e! (edit, then use 'hg resolve --mark')
   Fix up the change and run hg histedit --continue
 
 just continue this time
@@ -253,8 +251,7 @@
   $ hg histedit 3 --commands $EDITED 2>&1 | fixbundle
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   merging e
-  warning: conflicts during merge.
-  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging e! (edit, then use 'hg resolve --mark')
   Fix up the change and run hg histedit --continue
 
 fix up
@@ -266,8 +263,7 @@
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   merging e
-  warning: conflicts during merge.
-  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging e! (edit, then use 'hg resolve --mark')
   Fix up the change and run hg histedit --continue
 
 just continue this time
--- a/tests/test-histedit-fold.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-histedit-fold.t	Tue Oct 20 15:59:10 2015 -0500
@@ -296,8 +296,7 @@
   > EOF
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   merging file
-  warning: conflicts during merge.
-  merging file incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging file! (edit, then use 'hg resolve --mark')
   Fix up the change and run hg histedit --continue
   [1]
 There were conflicts, we keep P1 content. This
@@ -358,8 +357,7 @@
   > EOF
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   merging file
-  warning: conflicts during merge.
-  merging file incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging file! (edit, then use 'hg resolve --mark')
   Fix up the change and run hg histedit --continue
   [1]
   $ cat > file << EOF
@@ -392,7 +390,8 @@
   HG: changed file
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  saved backup bundle to $TESTTMP/fold-with-dropped/.hg/strip-backup/617f94f13c0f-3d69522c-backup.hg (glob)
+  saved backup bundle to $TESTTMP/fold-with-dropped/.hg/strip-backup/55c8d8dc79ce-4066cd98-backup.hg (glob)
+  saved backup bundle to $TESTTMP/fold-with-dropped/.hg/strip-backup/617f94f13c0f-a35700fc-backup.hg (glob)
   $ hg logt -G
   @  1:10c647b2cdd5 +4
   |
@@ -509,4 +508,64 @@
   $ hg add amended.txt
   $ hg ci -q --config extensions.largefiles= --amend -I amended.txt
 
+Test that folding multiple changes in a row doesn't show multiple
+editors.
+
+  $ echo foo >> foo
+  $ hg add foo
+  $ hg ci -m foo1
+  $ echo foo >> foo
+  $ hg ci -m foo2
+  $ echo foo >> foo
+  $ hg ci -m foo3
+  $ hg logt
+  4:21679ff7675c foo3
+  3:b7389cc4d66e foo2
+  2:0e01aeef5fa8 foo1
+  1:578c7455730c a
+  0:79b99e9c8e49 b
+  $ cat > $TESTTMP/editor.sh <<EOF
+  > echo ran editor >> $TESTTMP/editorlog.txt
+  > cat \$1 >> $TESTTMP/editorlog.txt
+  > echo END >> $TESTTMP/editorlog.txt
+  > echo merged foos > \$1
+  > EOF
+  $ HGEDITOR="sh $TESTTMP/editor.sh" hg histedit 1 --commands - 2>&1 <<EOF | fixbundle
+  > pick 578c7455730c 1 a
+  > pick 0e01aeef5fa8 2 foo1
+  > fold b7389cc4d66e 3 foo2
+  > fold 21679ff7675c 4 foo3
+  > EOF
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  reverting foo
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  merging foo
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg logt
+  2:e8bedbda72c1 merged foos
+  1:578c7455730c a
+  0:79b99e9c8e49 b
+Editor should have run only once
+  $ cat $TESTTMP/editorlog.txt
+  ran editor
+  foo1
+  ***
+  foo2
+  ***
+  foo3
+  
+  
+  
+  HG: Enter commit message.  Lines beginning with 'HG:' are removed.
+  HG: Leave message empty to abort commit.
+  HG: --
+  HG: user: test
+  HG: branch 'default'
+  HG: added foo
+  END
+
   $ cd ..
--- a/tests/test-histedit-non-commute-abort.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-histedit-non-commute-abort.t	Tue Oct 20 15:59:10 2015 -0500
@@ -72,8 +72,7 @@
   0 files updated, 0 files merged, 2 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   merging e
-  warning: conflicts during merge.
-  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging e! (edit, then use 'hg resolve --mark')
   Fix up the change and run hg histedit --continue
 
 
--- a/tests/test-histedit-non-commute.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-histedit-non-commute.t	Tue Oct 20 15:59:10 2015 -0500
@@ -89,8 +89,7 @@
   $ hg histedit 3 --commands $EDITED 2>&1 | fixbundle
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   merging e
-  warning: conflicts during merge.
-  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging e! (edit, then use 'hg resolve --mark')
   Fix up the change and run hg histedit --continue
 
 abort the edit
@@ -147,8 +146,7 @@
   $ hg histedit 3 --commands $EDITED 2>&1 | fixbundle
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   merging e
-  warning: conflicts during merge.
-  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging e! (edit, then use 'hg resolve --mark')
   Fix up the change and run hg histedit --continue
 
 fix up
@@ -158,8 +156,7 @@
   $ hg histedit --continue 2>&1 | fixbundle
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   merging e
-  warning: conflicts during merge.
-  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging e! (edit, then use 'hg resolve --mark')
   Fix up the change and run hg histedit --continue
 
 This failure is caused by 7b4e2f4b7bcd "e" not rebasing the non commutative
@@ -236,8 +233,7 @@
   $ hg histedit 3 --commands $EDITED 2>&1 | fixbundle
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   merging e
-  warning: conflicts during merge.
-  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging e! (edit, then use 'hg resolve --mark')
   Fix up the change and run hg histedit --continue
 
   $ echo 'I can haz no commute' > e
@@ -246,8 +242,7 @@
   $ hg histedit --continue 2>&1 | fixbundle
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   merging e
-  warning: conflicts during merge.
-  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging e! (edit, then use 'hg resolve --mark')
   Fix up the change and run hg histedit --continue
 second edit also fails, but just continue
   $ hg revert -r 'p1()' e
--- a/tests/test-histedit-obsolete.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-histedit-obsolete.t	Tue Oct 20 15:59:10 2015 -0500
@@ -54,7 +54,7 @@
   #  f, fold = use commit, but combine it with the one above
   #  r, roll = like fold, but discard this commit's description
   #  d, drop = remove commit from history
-  #  m, mess = edit message without changing commit content
+  #  m, mess = edit commit message without changing commit content
   #
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg histedit 1 --commands - --verbose <<EOF | grep histedit
@@ -64,12 +64,16 @@
   > fold e860deea161a 4 e
   > pick 652413bf663e 5 f
   > EOF
-  saved backup bundle to $TESTTMP/base/.hg/strip-backup/96e494a2d553-3c6c5d92-backup.hg (glob)
+  [1]
   $ hg log --graph --hidden
-  @  8:cacdfd884a93 f
+  @  10:cacdfd884a93 f
+  |
+  o  9:59d9f330561f d
   |
-  o  7:59d9f330561f d
-  |
+  | x  8:b558abc46d09 fold-temp-revision e860deea161a
+  | |
+  | x  7:96e494a2d553 d
+  |/
   o  6:b346ab9a313d c
   |
   | x  5:652413bf663e f
@@ -85,6 +89,8 @@
   o  0:cb9a9f314b8b a
   
   $ hg debugobsolete
+  96e494a2d553dd05902ba1cee1d94d4cb7b8faed 0 {b346ab9a313db8537ecf96fca3ca3ca984ef3bd7} (*) {'user': 'test'} (glob)
+  b558abc46d09c30f57ac31e85a8a3d64d2e906e4 0 {96e494a2d553dd05902ba1cee1d94d4cb7b8faed} (*) {'user': 'test'} (glob)
   d2ae7f538514cd87c17547b0de4cea71fe1af9fb 0 {cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b} (*) {'user': 'test'} (glob)
   177f92b773850b59254aa5e923436f921b55483b b346ab9a313db8537ecf96fca3ca3ca984ef3bd7 0 (*) {'user': 'test'} (glob)
   055a42cdd88768532f9cf79daa407fc8d138de9b 59d9f330561fd6c88b1a6b32f0e45034d88db784 0 (*) {'user': 'test'} (glob)
@@ -105,7 +111,7 @@
   0 files updated, 0 files merged, 3 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg log --graph
-  @  9:c13eb81022ca f
+  @  11:c13eb81022ca f
   |
   o  6:b346ab9a313d c
   |
@@ -127,7 +133,7 @@
   $ hg up '.^'
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ hg log -r 'children(.)'
-  9:c13eb81022ca f (no-eol)
+  11:c13eb81022ca f (no-eol)
   $ hg histedit -r '.' --commands - <<EOF
   > edit b346ab9a313d 6 c
   > EOF
@@ -141,12 +147,12 @@
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
   $ hg log -r 'unstable()'
-  9:c13eb81022ca f (no-eol)
+  11:c13eb81022ca f (no-eol)
 
 stabilise
 
   $ hg rebase  -r 'unstable()' -d .
-  rebasing 9:c13eb81022ca "f"
+  rebasing 11:c13eb81022ca "f"
   $ hg up tip -q
 
 Test dropping of changeset on the top of the stack
@@ -166,7 +172,7 @@
   > EOF
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ hg log -G
-  @  10:40db8afa467b c
+  @  12:40db8afa467b c
   |
   o  0:cb9a9f314b8b a
   
@@ -188,9 +194,9 @@
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg log -G
-  @  15:ee6544123ab8 c
+  @  17:ee6544123ab8 c
   |
-  o  14:269e713e9eae g
+  o  16:269e713e9eae g
   |
   o  0:cb9a9f314b8b a
   
@@ -212,9 +218,9 @@
   $ hg ph -pv '.^'
   phase changed for 2 changesets
   $ hg log -G
-  @  11:b449568bf7fc (draft) f
+  @  13:b449568bf7fc (draft) f
   |
-  o  10:40db8afa467b (public) c
+  o  12:40db8afa467b (public) c
   |
   o  0:cb9a9f314b8b (public) a
   
@@ -234,19 +240,19 @@
   > done
   $ hg phase --force --secret .~2
   $ hg log -G
-  @  16:ee118ab9fa44 (secret) k
+  @  18:ee118ab9fa44 (secret) k
   |
-  o  15:3a6c53ee7f3d (secret) j
+  o  17:3a6c53ee7f3d (secret) j
   |
-  o  14:b605fb7503f2 (secret) i
+  o  16:b605fb7503f2 (secret) i
   |
-  o  13:7395e1ff83bd (draft) h
+  o  15:7395e1ff83bd (draft) h
   |
-  o  12:6b70183d2492 (draft) g
+  o  14:6b70183d2492 (draft) g
   |
-  o  11:b449568bf7fc (draft) f
+  o  13:b449568bf7fc (draft) f
   |
-  o  10:40db8afa467b (public) c
+  o  12:40db8afa467b (public) c
   |
   o  0:cb9a9f314b8b (public) a
   
@@ -284,19 +290,19 @@
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg log -G
-  @  22:12e89af74238 (secret) k
+  @  24:12e89af74238 (secret) k
   |
-  o  21:636a8687b22e (secret) j
+  o  23:636a8687b22e (secret) j
   |
-  o  20:ccaf0a38653f (secret) i
+  o  22:ccaf0a38653f (secret) i
   |
-  o  19:11a89d1c2613 (draft) h
+  o  21:11a89d1c2613 (draft) h
   |
-  o  18:c1dec7ca82ea (draft) g
+  o  20:c1dec7ca82ea (draft) g
   |
-  o  17:087281e68428 (draft) f
+  o  19:087281e68428 (draft) f
   |
-  o  10:40db8afa467b (public) c
+  o  12:40db8afa467b (public) c
   |
   o  0:cb9a9f314b8b (public) a
   
@@ -333,19 +339,19 @@
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg log -G
-  @  22:12e89af74238 (secret) k
+  @  24:12e89af74238 (secret) k
   |
-  o  21:636a8687b22e (secret) j
+  o  23:636a8687b22e (secret) j
   |
-  o  20:ccaf0a38653f (secret) i
+  o  22:ccaf0a38653f (secret) i
   |
-  o  19:11a89d1c2613 (draft) h
+  o  21:11a89d1c2613 (draft) h
   |
-  o  18:c1dec7ca82ea (draft) g
+  o  20:c1dec7ca82ea (draft) g
   |
-  o  17:087281e68428 (draft) f
+  o  19:087281e68428 (draft) f
   |
-  o  10:40db8afa467b (public) c
+  o  12:40db8afa467b (public) c
   |
   o  0:cb9a9f314b8b (public) a
   
@@ -375,19 +381,19 @@
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg log -G
-  @  21:558246857888 (secret) k
+  @  23:558246857888 (secret) k
   |
-  o  20:28bd44768535 (secret) h
+  o  22:28bd44768535 (secret) h
   |
-  o  19:d5395202aeb9 (secret) i
+  o  21:d5395202aeb9 (secret) i
   |
-  o  18:21edda8e341b (secret) g
+  o  20:21edda8e341b (secret) g
   |
-  o  17:5ab64f3a4832 (secret) j
+  o  19:5ab64f3a4832 (secret) j
   |
-  o  11:b449568bf7fc (draft) f
+  o  13:b449568bf7fc (draft) f
   |
-  o  10:40db8afa467b (public) c
+  o  12:40db8afa467b (public) c
   |
   o  0:cb9a9f314b8b (public) a
   
@@ -428,33 +434,30 @@
   0 files updated, 0 files merged, 2 files removed, 0 files unresolved
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  saved backup bundle to $TESTTMP/folding/.hg/strip-backup/58019c66f35f-96092fce-backup.hg (glob)
-  saved backup bundle to $TESTTMP/folding/.hg/strip-backup/83d1858e070b-f3469cf8-backup.hg (glob)
-  saved backup bundle to $TESTTMP/folding/.hg/strip-backup/859969f5ed7e-d89a19d7-backup.hg (glob)
   $ hg log -G
-  @  19:f9daec13fb98 (secret) i
+  @  27:f9daec13fb98 (secret) i
   |
-  o  18:49807617f46a (secret) g
+  o  24:49807617f46a (secret) g
   |
-  o  17:050280826e04 (draft) h
+  o  21:050280826e04 (draft) h
   |
-  o  10:40db8afa467b (public) c
+  o  12:40db8afa467b (public) c
   |
   o  0:cb9a9f314b8b (public) a
   
-  $ hg co 18
+  $ hg co 49807617f46a
   0 files updated, 0 files merged, 2 files removed, 0 files unresolved
   $ echo wat >> wat
   $ hg add wat
   $ hg ci -m 'add wat'
   created new head
-  $ hg merge 19
+  $ hg merge f9daec13fb98
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   $ hg ci -m 'merge'
   $ echo not wat > wat
   $ hg ci -m 'modify wat'
-  $ hg histedit 17
+  $ hg histedit 050280826e04
   abort: cannot edit history that contains merges
   [255]
   $ cd ..
--- a/tests/test-histedit-outgoing.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-histedit-outgoing.t	Tue Oct 20 15:59:10 2015 -0500
@@ -51,7 +51,7 @@
   #  f, fold = use commit, but combine it with the one above
   #  r, roll = like fold, but discard this commit's description
   #  d, drop = remove commit from history
-  #  m, mess = edit message without changing commit content
+  #  m, mess = edit commit message without changing commit content
   #
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ cd ..
@@ -83,7 +83,7 @@
   #  f, fold = use commit, but combine it with the one above
   #  r, roll = like fold, but discard this commit's description
   #  d, drop = remove commit from history
-  #  m, mess = edit message without changing commit content
+  #  m, mess = edit commit message without changing commit content
   #
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ cd ..
@@ -107,7 +107,7 @@
   #  f, fold = use commit, but combine it with the one above
   #  r, roll = like fold, but discard this commit's description
   #  d, drop = remove commit from history
-  #  m, mess = edit message without changing commit content
+  #  m, mess = edit commit message without changing commit content
   #
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
--- a/tests/test-hook.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-hook.t	Tue Oct 20 15:59:10 2015 -0500
@@ -223,8 +223,8 @@
   $ echo "update = printenv.py update" >> .hg/hgrc
   $ hg update
   preupdate hook: HG_PARENT1=539e4b31b6dc
+  update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc
 
 pushkey hook
 
@@ -394,7 +394,7 @@
   $ cd "$TESTTMP/b"
 
   $ cat > hooktests.py <<EOF
-  > from mercurial import util
+  > from mercurial import error
   > 
   > uncallable = 0
   > 
@@ -421,7 +421,7 @@
   >     raise LocalException('exception from hook')
   > 
   > def aborthook(**args):
-  >     raise util.Abort('raise abort from hook')
+  >     raise error.Abort('raise abort from hook')
   > 
   > def brokenhook(**args):
   >     return 1 + {}
@@ -628,7 +628,7 @@
   Traceback (most recent call last):
   ImportError: No module named hgext_importfail
   Traceback (most recent call last):
-  Abort: precommit.importfail hook is invalid (import of "importfail" failed)
+  HookLoadError: precommit.importfail hook is invalid (import of "importfail" failed)
   abort: precommit.importfail hook is invalid (import of "importfail" failed)
 
 Issue1827: Hooks Update & Commit not completely post operation
@@ -644,8 +644,8 @@
   $ hg ci -ma
   223eafe2750c tip
   $ hg up 0 --config extensions.largefiles=
+  cb9a9f314b8b
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  cb9a9f314b8b
 
 make sure --verbose (and --quiet/--debug etc.) are propagated to the local ui
 that is passed to pre/post hooks
--- a/tests/test-i18n.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-i18n.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,8 +1,6 @@
-#require gettext
-
 (Translations are optional)
 
-#if no-outer-repo
+#if gettext no-outer-repo
 
 Test that translations are compiled and installed correctly.
 
@@ -27,6 +25,8 @@
 
 #endif
 
+#if gettext
+
 Test keyword search in translated help text:
 
   $ HGENCODING=UTF-8 LANGUAGE=de hg help -k blättern
@@ -38,6 +38,8 @@
   
    pager Verwendet einen externen Pager zum Bl\xc3\xa4ttern in der Ausgabe von Befehlen (esc)
 
+#endif
+
 Check Mercurial specific translation problems in each *.po files, and
 tool itself by doctest
 
--- a/tests/test-identify.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-identify.t	Tue Oct 20 15:59:10 2015 -0500
@@ -120,13 +120,13 @@
   $ echo fake >> .hg/requires
   $ hg id
   abort: repository requires features unknown to this Mercurial: fake!
-  (see http://mercurial.selenic.com/wiki/MissingRequirement for more information)
+  (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
 
   $ cd ..
 #if no-outer-repo
   $ hg id test
   abort: repository requires features unknown to this Mercurial: fake!
-  (see http://mercurial.selenic.com/wiki/MissingRequirement for more information)
+  (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
 #endif
--- a/tests/test-import.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-import.t	Tue Oct 20 15:59:10 2015 -0500
@@ -428,6 +428,215 @@
   working directory now based on revision 0
   $ hg --cwd b parents --template 'parent: {rev}\n'
   parent: 0
+
+Test that "hg rollback" doesn't restore dirstate to the one at the
+beginning of the rolled-back transaction in the not-"parent-gone" case.
+
+Invoking the pretxncommit hook causes '.hg/dirstate' to be marked as a
+file to be restored at rollback, as described by DirstateTransactionPlan
+(see the wiki page for details).
+
+  $ hg --cwd b branch -q foobar
+  $ hg --cwd b commit -m foobar
+  $ hg --cwd b update 0 -q
+  $ hg --cwd b import ../patch1 ../patch2 --config hooks.pretxncommit=true
+  applying ../patch1
+  applying ../patch2
+  $ hg --cwd b update -q 1
+  $ hg --cwd b rollback -q
+  $ hg --cwd b parents --template 'parent: {rev}\n'
+  parent: 1
+
+  $ hg --cwd b update -q -C 0
+  $ hg --cwd b --config extensions.strip= strip -q 1
+
+Test visibility of in-memory dirstate changes inside a transaction to
+external processes
+
+  $ echo foo > a/foo
+  $ hg --cwd a commit -A -m 'adding foo' foo
+  $ hg --cwd a export -o '../patch%R' 3
+
+  $ cat > $TESTTMP/checkvisibility.sh <<EOF
+  > echo "===="
+  > hg parents --template "VISIBLE {rev}:{node|short}\n"
+  > hg status -amr
+  > # test that pending changes are hidden
+  > unset HG_PENDING
+  > hg parents --template "ACTUAL  {rev}:{node|short}\n"
+  > hg status -amr
+  > echo "===="
+  > EOF
+
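The helper script above leans on HG_PENDING: while a transaction is open, hg exports it (set to the repository root) to external editors and hooks, and a child hg invocation that finds it pointing at its own root reads the pending changelog and dirstate; stripping the variable shows only what has really been committed. A hypothetical stand-alone helper (not part of this changeset) doing the same comparison in Python:

  # Sketch only: query "hg parents" twice, once with HG_PENDING left as hg
  # set it (the pending, in-transaction view) and once with it removed (the
  # committed view), mirroring checkvisibility.sh.
  import os
  import subprocess

  def parent_views(repo_root):
      def run(env):
          return subprocess.check_output(
              ['hg', '-R', repo_root, 'parents',
               '--template', '{rev}:{node|short}\\n'],
              env=env).strip()

      pending_env = dict(os.environ)       # HG_PENDING untouched
      actual_env = dict(os.environ)
      actual_env.pop('HG_PENDING', None)   # hide the open transaction
      return {'visible': run(pending_env), 'actual': run(actual_env)}
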
+== test visibility to external editor
+
+  $ (cd b && sh "$TESTTMP/checkvisibility.sh")
+  ====
+  VISIBLE 0:80971e65b431
+  ACTUAL  0:80971e65b431
+  ====
+
+  $ HGEDITOR="sh $TESTTMP/checkvisibility.sh" hg --cwd b import -v --edit ../patch1 ../patch2 ../patch3
+  applying ../patch1
+  patching file a
+  ====
+  VISIBLE 0:80971e65b431
+  M a
+  ACTUAL  0:80971e65b431
+  M a
+  ====
+  committing files:
+  a
+  committing manifest
+  committing changelog
+  created 1d4bd90af0e4
+  applying ../patch2
+  patching file a
+  ====
+  VISIBLE 1:1d4bd90af0e4
+  M a
+  ACTUAL  0:80971e65b431
+  M a
+  ====
+  committing files:
+  a
+  committing manifest
+  committing changelog
+  created 6d019af21222
+  applying ../patch3
+  patching file foo
+  adding foo
+  ====
+  VISIBLE 2:6d019af21222
+  A foo
+  ACTUAL  0:80971e65b431
+  M a
+  ====
+  committing files:
+  foo
+  committing manifest
+  committing changelog
+  created 55e3f75b2378
+
+  $ hg --cwd b rollback -q
+
+(the content of file "a" has already changed and should be recognized as
+"M", even though the dirstate is restored to the one before "hg import")
+
+  $ (cd b && sh "$TESTTMP/checkvisibility.sh")
+  ====
+  VISIBLE 0:80971e65b431
+  M a
+  ACTUAL  0:80971e65b431
+  M a
+  ====
+  $ hg --cwd b revert --no-backup a
+  $ rm -f b/foo
+
+== test visibility to precommit external hook
+
+  $ cat >> b/.hg/hgrc <<EOF
+  > [hooks]
+  > precommit.visibility = sh $TESTTMP/checkvisibility.sh
+  > EOF
+
+  $ (cd b && sh "$TESTTMP/checkvisibility.sh")
+  ====
+  VISIBLE 0:80971e65b431
+  ACTUAL  0:80971e65b431
+  ====
+
+  $ hg --cwd b import ../patch1 ../patch2 ../patch3
+  applying ../patch1
+  ====
+  VISIBLE 0:80971e65b431
+  M a
+  ACTUAL  0:80971e65b431
+  M a
+  ====
+  applying ../patch2
+  ====
+  VISIBLE 1:1d4bd90af0e4
+  M a
+  ACTUAL  0:80971e65b431
+  M a
+  ====
+  applying ../patch3
+  ====
+  VISIBLE 2:6d019af21222
+  A foo
+  ACTUAL  0:80971e65b431
+  M a
+  ====
+
+  $ hg --cwd b rollback -q
+  $ (cd b && sh "$TESTTMP/checkvisibility.sh")
+  ====
+  VISIBLE 0:80971e65b431
+  M a
+  ACTUAL  0:80971e65b431
+  M a
+  ====
+  $ hg --cwd b revert --no-backup a
+  $ rm -f b/foo
+
+  $ cat >> b/.hg/hgrc <<EOF
+  > [hooks]
+  > precommit.visibility =
+  > EOF
+
+== test visibility to pretxncommit external hook
+
+  $ cat >> b/.hg/hgrc <<EOF
+  > [hooks]
+  > pretxncommit.visibility = sh $TESTTMP/checkvisibility.sh
+  > EOF
+
+  $ (cd b && sh "$TESTTMP/checkvisibility.sh")
+  ====
+  VISIBLE 0:80971e65b431
+  ACTUAL  0:80971e65b431
+  ====
+
+  $ hg --cwd b import ../patch1 ../patch2 ../patch3
+  applying ../patch1
+  ====
+  VISIBLE 0:80971e65b431
+  M a
+  ACTUAL  0:80971e65b431
+  M a
+  ====
+  applying ../patch2
+  ====
+  VISIBLE 1:1d4bd90af0e4
+  M a
+  ACTUAL  0:80971e65b431
+  M a
+  ====
+  applying ../patch3
+  ====
+  VISIBLE 2:6d019af21222
+  A foo
+  ACTUAL  0:80971e65b431
+  M a
+  ====
+
+  $ hg --cwd b rollback -q
+  $ (cd b && sh "$TESTTMP/checkvisibility.sh")
+  ====
+  VISIBLE 0:80971e65b431
+  M a
+  ACTUAL  0:80971e65b431
+  M a
+  ====
+  $ hg --cwd b revert --no-backup a
+  $ rm -f b/foo
+
+  $ cat >> b/.hg/hgrc <<EOF
+  > [hooks]
+  > pretxncommit.visibility =
+  > EOF
+
   $ rm -r b
 
 
@@ -1505,3 +1714,49 @@
   $ hg status -c .
   C a
   C b
+
+Importing some extra header
+===========================
+
+  $ cat > $TESTTMP/parseextra.py <<EOF
+  > import mercurial.patch
+  > import mercurial.cmdutil
+  > 
+  > def processfoo(repo, data, extra, opts):
+  >     if 'foo' in data:
+  >         extra['foo'] = data['foo']
+  > def postimport(ctx):
+  >     if 'foo' in ctx.extra():
+  >         ctx.repo().ui.write('imported-foo: %s\n' % ctx.extra()['foo'])
+  > 
+  > mercurial.patch.patchheadermap.append(('Foo', 'foo'))
+  > mercurial.cmdutil.extrapreimport.append('foo')
+  > mercurial.cmdutil.extrapreimportmap['foo'] = processfoo
+  > mercurial.cmdutil.extrapostimport.append('foo')
+  > mercurial.cmdutil.extrapostimportmap['foo'] = postimport
+  > EOF
+  $ printf "[extensions]\nparseextra=$TESTTMP/parseextra.py" >> $HGRCPATH
+  $ hg up -C tip
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cat > $TESTTMP/foo.patch <<EOF
+  > # HG changeset patch
+  > # User Rataxes
+  > # Date 0 0
+  > #      Thu Jan 01 00:00:00 1970 +0000
+  > # Foo bar
+  > height
+  > 
+  > --- a/a	Thu Jan 01 00:00:00 1970 +0000
+  > +++ b/a	Wed Oct 07 09:17:44 2015 +0000
+  > @@ -5,3 +5,4 @@
+  >  five
+  >  six
+  >  seven
+  > +heigt
+  > EOF
+  $ hg import $TESTTMP/foo.patch
+  applying $TESTTMP/foo.patch
+  imported-foo: bar
+  $ hg log --debug -r . | grep extra
+  extra:       branch=default
+  extra:       foo=bar
--- a/tests/test-issue1089.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-issue1089.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,4 +1,4 @@
-http://mercurial.selenic.com/bts/issue1089
+https://bz.mercurial-scm.org/1089
 
   $ hg init
   $ mkdir a
--- a/tests/test-issue1175.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-issue1175.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,4 +1,4 @@
-http://mercurial.selenic.com/bts/issue1175
+https://bz.mercurial-scm.org/1175
 
   $ hg init
   $ touch a
@@ -70,8 +70,7 @@
   $ hg graft 1
   grafting 1:5974126fad84 "b1"
   merging b
-  warning: conflicts during merge.
-  merging b incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
   abort: unresolved conflicts, can't continue
   (use hg resolve and hg graft --continue)
   [255]
--- a/tests/test-issue1306.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-issue1306.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,4 +1,4 @@
-http://mercurial.selenic.com/bts/issue1306
+https://bz.mercurial-scm.org/1306
 
 Initialize remote repo with branches:
 
--- a/tests/test-issue1438.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-issue1438.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,6 +1,6 @@
 #require symlink
 
-http://mercurial.selenic.com/bts/issue1438
+https://bz.mercurial-scm.org/1438
 
   $ hg init
 
--- a/tests/test-issue1502.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-issue1502.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,4 +1,4 @@
-http://mercurial.selenic.com/bts/issue1502
+https://bz.mercurial-scm.org/1502
 
 Initialize repository
 
--- a/tests/test-issue1877.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-issue1877.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,4 +1,4 @@
-http://mercurial.selenic.com/bts/issue1877
+https://bz.mercurial-scm.org/1877
 
   $ hg init a
   $ cd a
--- a/tests/test-issue2137.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-issue2137.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,4 +1,4 @@
-http://mercurial.selenic.com/bts/issue2137
+https://bz.mercurial-scm.org/2137
 
 Setup:
 
--- a/tests/test-issue522.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-issue522.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,4 +1,4 @@
-http://mercurial.selenic.com/bts/issue522
+https://bz.mercurial-scm.org/522
 
 In the merge below, the file "foo" has the same contents in both
 parents, but if we look at the file-level history, we'll notice that
--- a/tests/test-issue612.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-issue612.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,4 +1,4 @@
-http://mercurial.selenic.com/bts/issue612
+https://bz.mercurial-scm.org/612
 
   $ hg init
   $ mkdir src
--- a/tests/test-issue619.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-issue619.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,4 +1,4 @@
-http://mercurial.selenic.com/bts/issue619
+https://bz.mercurial-scm.org/619
 
   $ hg init
   $ echo a > a
--- a/tests/test-issue660.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-issue660.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,5 +1,5 @@
-http://mercurial.selenic.com/bts/issue660 and:
-http://mercurial.selenic.com/bts/issue322
+https://bz.mercurial-scm.org/660 and:
+https://bz.mercurial-scm.org/322
 
   $ hg init
   $ echo a > a
--- a/tests/test-issue672.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-issue672.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,4 +1,4 @@
-http://mercurial.selenic.com/bts/issue672
+https://bz.mercurial-scm.org/672
 
 # 0-2-4
 #  \ \ \
@@ -65,8 +65,8 @@
    branchmerge: True, force: False, partial: False
    ancestor: c64f439569a9, local: e327dca35ac8+, remote: 746e9549ea96
    preserving 1a for resolve of 1a
-   1a: local copied/moved from 1 -> m
-  picked tool 'internal:merge' for 1a (binary False symlink False)
+   1a: local copied/moved from 1 -> m (premerge)
+  picked tool ':merge' for 1a (binary False symlink False)
   merging 1a and 1 to 1a
   my 1a@e327dca35ac8+ other 1@746e9549ea96 ancestor 1@81f4b099af3d
    premerge successful
@@ -88,8 +88,8 @@
    ancestor: c64f439569a9, local: 746e9549ea96+, remote: e327dca35ac8
    preserving 1 for resolve of 1a
   removing 1
-   1a: remote moved from 1 -> m
-  picked tool 'internal:merge' for 1a (binary False symlink False)
+   1a: remote moved from 1 -> m (premerge)
+  picked tool ':merge' for 1a (binary False symlink False)
   merging 1 and 1a to 1a
   my 1a@746e9549ea96+ other 1a@e327dca35ac8 ancestor 1@81f4b099af3d
    premerge successful
--- a/tests/test-issue842.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-issue842.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,4 +1,4 @@
-http://mercurial.selenic.com/bts/issue842
+https://bz.mercurial-scm.org/842
 
   $ hg init
   $ echo foo > a
--- a/tests/test-keyword.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-keyword.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1070,8 +1070,7 @@
   created new head
   $ hg merge
   merging m
-  warning: conflicts during merge.
-  merging m incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging m! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
--- a/tests/test-largefiles-misc.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-largefiles-misc.t	Tue Oct 20 15:59:10 2015 -0500
@@ -934,7 +934,7 @@
   $TESTTMP/individualenabling/enabledlocally (glob)
   $ hg -R notenabledlocally root
   abort: repository requires features unknown to this Mercurial: largefiles!
-  (see http://mercurial.selenic.com/wiki/MissingRequirement for more information)
+  (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
 
   $ hg init push-dst
@@ -951,7 +951,7 @@
 
   $ hg clone enabledlocally clone-dst
   abort: repository requires features unknown to this Mercurial: largefiles!
-  (see http://mercurial.selenic.com/wiki/MissingRequirement for more information)
+  (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
   $ test -d clone-dst
   [1]
--- a/tests/test-largefiles-update.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-largefiles-update.t	Tue Oct 20 15:59:10 2015 -0500
@@ -127,8 +127,7 @@
   keep (l)ocal e5bb990443d6a92aaf7223813720f7566c9dd05b or
   take (o)ther 58e24f733a964da346e2407a2bee99d9001184f5? o
   merging normal1
-  warning: conflicts during merge.
-  merging normal1 incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging normal1! (edit, then use 'hg resolve --mark')
   getting changed largefiles
   1 largefiles updated, 0 removed
   0 files updated, 1 files merged, 0 files removed, 1 files unresolved
@@ -141,6 +140,28 @@
   $ cat .hglf/large1
   58e24f733a964da346e2407a2bee99d9001184f5
 
+(merge non-existing largefiles from "other" via conflict prompt -
+make sure the following commit doesn't abort in a confusing way when trying to
+mark the non-existing file as normal in lfdirstate)
+
+  $ mv .hg/largefiles/58e24f733a964da346e2407a2bee99d9001184f5 .
+  $ hg update -q -C 3
+  $ hg merge --config largefiles.usercache=not --config debug.dirstate.delaywrite=2 --tool :local --config ui.interactive=True <<EOF
+  > o
+  > EOF
+  largefile large1 has a merge conflict
+  ancestor was 4669e532d5b2c093a78eca010077e708a071bb64
+  keep (l)ocal e5bb990443d6a92aaf7223813720f7566c9dd05b or
+  take (o)ther 58e24f733a964da346e2407a2bee99d9001184f5? o
+  getting changed largefiles
+  large1: largefile 58e24f733a964da346e2407a2bee99d9001184f5 not available from file:/*/$TESTTMP/repo (glob)
+  0 largefiles updated, 0 removed
+  0 files updated, 2 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+  $ hg commit -m '1-2-3 testing'
+  $ hg rollback -q
+  $ mv 58e24f733a964da346e2407a2bee99d9001184f5 .hg/largefiles/
+
 Test that "hg revert -r REV" updates largefiles from "REV" correctly
 
   $ hg update -q -C 3
@@ -555,8 +576,7 @@
   keep (l)ocal e5bb990443d6a92aaf7223813720f7566c9dd05b or
   take (o)ther 58e24f733a964da346e2407a2bee99d9001184f5? o
   merging normal1
-  warning: conflicts during merge.
-  merging normal1 incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging normal1! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
   $ cat .hglf/large1
--- a/tests/test-lfconvert.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-lfconvert.t	Tue Oct 20 15:59:10 2015 -0500
@@ -124,9 +124,9 @@
   $ hg commit -q -m"remove large, normal3"
   $ hg merge
   merging sub/maybelarge.dat and stuff/maybelarge.dat to stuff/maybelarge.dat
+  merging sub/normal2 and stuff/normal2 to stuff/normal2
   warning: $TESTTMP/bigfile-repo/stuff/maybelarge.dat looks like a binary file. (glob)
-  merging stuff/maybelarge.dat incomplete! (edit conflicts, then use 'hg resolve --mark')
-  merging sub/normal2 and stuff/normal2 to stuff/normal2
+  warning: conflicts while merging stuff/maybelarge.dat! (edit, then use 'hg resolve --mark')
   0 files updated, 1 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
--- a/tests/test-lock-badness.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-lock-badness.t	Tue Oct 20 15:59:10 2015 -0500
@@ -14,7 +14,7 @@
 Test that raising an exception in the release function doesn't cause the lock to choke
 
   $ cat > testlock.py << EOF
-  > from mercurial import cmdutil, error, util
+  > from mercurial import cmdutil, error, error
+  > from mercurial import cmdutil, error, error
   > 
   > cmdtable = {}
   > command = cmdutil.command(cmdtable)
@@ -22,7 +22,7 @@
   > def acquiretestlock(repo, releaseexc):
   >     def unlock():
   >         if releaseexc:
-  >             raise util.Abort('expected release exception')
+  >             raise error.Abort('expected release exception')
   >     l = repo._lock(repo.vfs, 'testlock', False, unlock, None, 'test lock')
   >     return l
   > 
@@ -35,7 +35,7 @@
   >         try:
   >             testlock = acquiretestlock(repo, False)
   >         except error.LockHeld:
-  >             raise util.Abort('lockfile on disk even after releasing!')
+  >             raise error.Abort('lockfile on disk even after releasing!')
   >         testlock.release()
   > EOF
   $ cat >> $HGRCPATH << EOF
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-lock.py	Tue Oct 20 15:59:10 2015 -0500
@@ -0,0 +1,271 @@
+from __future__ import absolute_import
+
+import copy
+import os
+import silenttestrunner
+import tempfile
+import types
+import unittest
+
+from mercurial import (
+    error,
+    lock,
+    scmutil,
+)
+
+testlockname = 'testlock'
+
+# work around http://bugs.python.org/issue1515
+if types.MethodType not in copy._deepcopy_dispatch:
+    def _deepcopy_method(x, memo):
+        return type(x)(x.im_func, copy.deepcopy(x.im_self, memo), x.im_class)
+    copy._deepcopy_dispatch[types.MethodType] = _deepcopy_method
+
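The _deepcopy_dispatch entry above matters because the fork simulation in testlockfork() below deep-copies a lock whose releasefn/acquirefn attributes are bound methods, and on the Python 2 line this test targets copy.deepcopy cannot handle bound methods out of the box (bugs.python.org/issue1515). A minimal, hypothetical reproduction of the underlying problem:

  # Sketch only: what fails without the dispatch entry registered above.
  import copy

  class holder(object):
      def ping(self):
          return 'pong'

  h = holder()
  h.callback = h.ping              # a bound method stored on the instance
  try:
      copy.deepcopy(h)
  except TypeError as exc:         # raised on Python 2 without the workaround
      print('deepcopy failed: %s' % exc)
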
+class lockwrapper(lock.lock):
+    def __init__(self, pidoffset, *args, **kwargs):
+        # lock.lock.__init__() calls lock(), so the pidoffset assignment needs
+        # to be earlier
+        self._pidoffset = pidoffset
+        super(lockwrapper, self).__init__(*args, **kwargs)
+    def _getpid(self):
+        return os.getpid() + self._pidoffset
+
+class teststate(object):
+    def __init__(self, testcase, dir, pidoffset=0):
+        self._testcase = testcase
+        self._acquirecalled = False
+        self._releasecalled = False
+        self._postreleasecalled = False
+        self.vfs = scmutil.vfs(dir, audit=False)
+        self._pidoffset = pidoffset
+
+    def makelock(self, *args, **kwargs):
+        l = lockwrapper(self._pidoffset, self.vfs, testlockname,
+                        releasefn=self.releasefn, acquirefn=self.acquirefn,
+                        *args, **kwargs)
+        l.postrelease.append(self.postreleasefn)
+        return l
+
+    def acquirefn(self):
+        self._acquirecalled = True
+
+    def releasefn(self):
+        self._releasecalled = True
+
+    def postreleasefn(self):
+        self._postreleasecalled = True
+
+    def assertacquirecalled(self, called):
+        self._testcase.assertEqual(
+            self._acquirecalled, called,
+            'expected acquire to be %s but was actually %s' % (
+                self._tocalled(called),
+                self._tocalled(self._acquirecalled),
+            ))
+
+    def resetacquirefn(self):
+        self._acquirecalled = False
+
+    def assertreleasecalled(self, called):
+        self._testcase.assertEqual(
+            self._releasecalled, called,
+            'expected release to be %s but was actually %s' % (
+                self._tocalled(called),
+                self._tocalled(self._releasecalled),
+            ))
+
+    def assertpostreleasecalled(self, called):
+        self._testcase.assertEqual(
+            self._postreleasecalled, called,
+            'expected postrelease to be %s but was actually %s' % (
+                self._tocalled(called),
+                self._tocalled(self._postreleasecalled),
+            ))
+
+    def assertlockexists(self, exists):
+        actual = self.vfs.lexists(testlockname)
+        self._testcase.assertEqual(
+            actual, exists,
+            'expected lock to %s but actually did %s' % (
+                self._toexists(exists),
+                self._toexists(actual),
+            ))
+
+    def _tocalled(self, called):
+        if called:
+            return 'called'
+        else:
+            return 'not called'
+
+    def _toexists(self, exists):
+        if exists:
+            return 'exist'
+        else:
+            return 'not exist'
+
+class testlock(unittest.TestCase):
+    def testlock(self):
+        state = teststate(self, tempfile.mkdtemp(dir=os.getcwd()))
+        lock = state.makelock()
+        state.assertacquirecalled(True)
+        lock.release()
+        state.assertreleasecalled(True)
+        state.assertpostreleasecalled(True)
+        state.assertlockexists(False)
+
+    def testrecursivelock(self):
+        state = teststate(self, tempfile.mkdtemp(dir=os.getcwd()))
+        lock = state.makelock()
+        state.assertacquirecalled(True)
+
+        state.resetacquirefn()
+        lock.lock()
+        # recursive lock should not call acquirefn again
+        state.assertacquirecalled(False)
+
+        lock.release() # brings lock refcount down from 2 to 1
+        state.assertreleasecalled(False)
+        state.assertpostreleasecalled(False)
+        state.assertlockexists(True)
+
+        lock.release() # releases the lock
+        state.assertreleasecalled(True)
+        state.assertpostreleasecalled(True)
+        state.assertlockexists(False)
+
+    def testlockfork(self):
+        state = teststate(self, tempfile.mkdtemp(dir=os.getcwd()))
+        lock = state.makelock()
+        state.assertacquirecalled(True)
+
+        # fake a fork
+        forklock = copy.deepcopy(lock)
+        forklock._pidoffset = 1
+        forklock.release()
+        state.assertreleasecalled(False)
+        state.assertpostreleasecalled(False)
+        state.assertlockexists(True)
+
+        # release the actual lock
+        lock.release()
+        state.assertreleasecalled(True)
+        state.assertpostreleasecalled(True)
+        state.assertlockexists(False)
+
+    def testinheritlock(self):
+        d = tempfile.mkdtemp(dir=os.getcwd())
+        parentstate = teststate(self, d)
+        parentlock = parentstate.makelock()
+        parentstate.assertacquirecalled(True)
+
+        # set up lock inheritance
+        with parentlock.inherit() as lockname:
+            parentstate.assertreleasecalled(True)
+            parentstate.assertpostreleasecalled(False)
+            parentstate.assertlockexists(True)
+
+            childstate = teststate(self, d, pidoffset=1)
+            childlock = childstate.makelock(parentlock=lockname)
+            childstate.assertacquirecalled(True)
+
+            childlock.release()
+            childstate.assertreleasecalled(True)
+            childstate.assertpostreleasecalled(False)
+            childstate.assertlockexists(True)
+
+            parentstate.resetacquirefn()
+
+        parentstate.assertacquirecalled(True)
+
+        parentlock.release()
+        parentstate.assertreleasecalled(True)
+        parentstate.assertpostreleasecalled(True)
+        parentstate.assertlockexists(False)
+
+    def testmultilock(self):
+        d = tempfile.mkdtemp(dir=os.getcwd())
+        state0 = teststate(self, d)
+        lock0 = state0.makelock()
+        state0.assertacquirecalled(True)
+
+        with lock0.inherit() as lock0name:
+            state0.assertreleasecalled(True)
+            state0.assertpostreleasecalled(False)
+            state0.assertlockexists(True)
+
+            state1 = teststate(self, d, pidoffset=1)
+            lock1 = state1.makelock(parentlock=lock0name)
+            state1.assertacquirecalled(True)
+
+            # from within lock1, acquire another lock
+            with lock1.inherit() as lock1name:
+                # since the file on disk is lock0's this should have the same
+                # name
+                self.assertEqual(lock0name, lock1name)
+
+                state2 = teststate(self, d, pidoffset=2)
+                lock2 = state2.makelock(parentlock=lock1name)
+                state2.assertacquirecalled(True)
+
+                lock2.release()
+                state2.assertreleasecalled(True)
+                state2.assertpostreleasecalled(False)
+                state2.assertlockexists(True)
+
+                state1.resetacquirefn()
+
+            state1.assertacquirecalled(True)
+
+            lock1.release()
+            state1.assertreleasecalled(True)
+            state1.assertpostreleasecalled(False)
+            state1.assertlockexists(True)
+
+        lock0.release()
+
+    def testinheritlockfork(self):
+        d = tempfile.mkdtemp(dir=os.getcwd())
+        parentstate = teststate(self, d)
+        parentlock = parentstate.makelock()
+        parentstate.assertacquirecalled(True)
+
+        # set up lock inheritance
+        with parentlock.inherit() as lockname:
+            childstate = teststate(self, d, pidoffset=1)
+            childlock = childstate.makelock(parentlock=lockname)
+            childstate.assertacquirecalled(True)
+
+            # fork the child lock
+            forkchildlock = copy.deepcopy(childlock)
+            forkchildlock._pidoffset += 1
+            forkchildlock.release()
+            childstate.assertreleasecalled(False)
+            childstate.assertpostreleasecalled(False)
+            childstate.assertlockexists(True)
+
+            # release the child lock
+            childlock.release()
+            childstate.assertreleasecalled(True)
+            childstate.assertpostreleasecalled(False)
+            childstate.assertlockexists(True)
+
+        parentlock.release()
+
+    def testinheritcheck(self):
+        d = tempfile.mkdtemp(dir=os.getcwd())
+        state = teststate(self, d)
+        def check():
+            raise error.LockInheritanceContractViolation('check failed')
+        lock = state.makelock(inheritchecker=check)
+        state.assertacquirecalled(True)
+
+        def tryinherit():
+            with lock.inherit():
+                pass
+
+        self.assertRaises(error.LockInheritanceContractViolation, tryinherit)
+
+        lock.release()
+
+if __name__ == '__main__':
+    silenttestrunner.main(__name__)
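The new test-lock.py drives lock inheritance through lock.inherit(). A minimal
usage sketch, built only from the calls the test itself makes (the scratch
directory is hypothetical):

  import tempfile
  from mercurial import lock, scmutil

  vfs = scmutil.vfs(tempfile.mkdtemp(), audit=False)  # hypothetical scratch dir
  parent = lock.lock(vfs, 'testlock')
  # inherit() releases the parent's hold but leaves the lock file on disk,
  # yielding its name so a cooperating child process can take it over
  with parent.inherit() as name:
      child = lock.lock(vfs, 'testlock', parentlock=name)
      child.release()
  # leaving the context re-acquires the lock for the parent
  parent.release()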
--- a/tests/test-log.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-log.t	Tue Oct 20 15:59:10 2015 -0500
@@ -620,6 +620,21 @@
   $ hg up -C 1
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ echo b1 > b1
+
+log -r "follow('set:clean()')"
+
+  $ hg log -r "follow('set:clean()')"
+  changeset:   0:67e992f2c4f3
+  user:        test
+  date:        Thu Jan 01 00:00:01 1970 +0000
+  summary:     base
+  
+  changeset:   1:3d5bf5654eda
+  user:        test
+  date:        Thu Jan 01 00:00:01 1970 +0000
+  summary:     r1
+  
+
   $ hg ci -Amb1 -d '1 0'
   adding b1
   created new head
@@ -646,7 +661,26 @@
   summary:     base
   
 
+log -r follow('glob:b*')
 
+  $ hg log -r "follow('glob:b*')"
+  changeset:   0:67e992f2c4f3
+  user:        test
+  date:        Thu Jan 01 00:00:01 1970 +0000
+  summary:     base
+  
+  changeset:   1:3d5bf5654eda
+  user:        test
+  date:        Thu Jan 01 00:00:01 1970 +0000
+  summary:     r1
+  
+  changeset:   3:e62f78d544b4
+  tag:         tip
+  parent:      1:3d5bf5654eda
+  user:        test
+  date:        Thu Jan 01 00:00:01 1970 +0000
+  summary:     b1
+  
 log -f -r '1 + 4'
 
   $ hg up -C 0
@@ -673,6 +707,16 @@
   date:        Thu Jan 01 00:00:01 1970 +0000
   summary:     base
   
+log -r "follow('set:grep(b2)')"
+
+  $ hg log -r "follow('set:grep(b2)')"
+  changeset:   4:ddb82e70d1a1
+  tag:         tip
+  parent:      0:67e992f2c4f3
+  user:        test
+  date:        Thu Jan 01 00:00:01 1970 +0000
+  summary:     b2
+  
 log -f -r null
 
   $ hg log -f -r null
@@ -1229,8 +1273,7 @@
   created new head
   $ hg merge 7
   merging foo
-  warning: conflicts during merge.
-  merging foo incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging foo! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -1241,8 +1284,7 @@
 
   $ hg merge 4
   merging foo
-  warning: conflicts during merge.
-  merging foo incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging foo! (edit, then use 'hg resolve --mark')
   1 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
--- a/tests/test-merge-commit.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-merge-commit.t	Tue Oct 20 15:59:10 2015 -0500
@@ -72,8 +72,8 @@
    branchmerge: True, force: False, partial: False
    ancestor: 0f2ff26688b9, local: 2263c1be0967+, remote: 0555950ead28
    preserving bar for resolve of bar
-   bar: versions differ -> m
-  picked tool 'internal:merge' for bar (binary False symlink False)
+   bar: versions differ -> m (premerge)
+  picked tool ':merge' for bar (binary False symlink False)
   merging bar
   my bar@2263c1be0967+ other bar@0555950ead28 ancestor bar@0f2ff26688b9
    premerge successful
@@ -158,8 +158,8 @@
    branchmerge: True, force: False, partial: False
    ancestor: 0f2ff26688b9, local: 2263c1be0967+, remote: 3ffa6b9e35f0
    preserving bar for resolve of bar
-   bar: versions differ -> m
-  picked tool 'internal:merge' for bar (binary False symlink False)
+   bar: versions differ -> m (premerge)
+  picked tool ':merge' for bar (binary False symlink False)
   merging bar
   my bar@2263c1be0967+ other bar@3ffa6b9e35f0 ancestor bar@0f2ff26688b9
    premerge successful
--- a/tests/test-merge-criss-cross.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-merge-criss-cross.t	Tue Oct 20 15:59:10 2015 -0500
@@ -82,10 +82,13 @@
    preserving f2 for resolve of f2
    f1: remote is newer -> g
   getting f1
-   f2: versions differ -> m
-  picked tool 'internal:dump' for f2 (binary False symlink False)
+   f2: versions differ -> m (premerge)
+  picked tool ':dump' for f2 (binary False symlink False)
   merging f2
   my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@40494bf2444c
+   f2: versions differ -> m (merge)
+  picked tool ':dump' for f2 (binary False symlink False)
+  my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@40494bf2444c
   1 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
--- a/tests/test-merge-default.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-merge-default.t	Tue Oct 20 15:59:10 2015 -0500
@@ -108,3 +108,10 @@
   5:a431fabd6039
   6:e88e33f3bf62
 
+Test experimental destination revset
+
+  $ hg log -r '_destmerge()'
+  abort: branch 'foobranch' has one head - please merge with an explicit rev
+  (run 'hg heads' to see all heads)
+  [255]
+
--- a/tests/test-merge-force.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-merge-force.t	Tue Oct 20 15:59:10 2015 -0500
@@ -192,38 +192,28 @@
   remote changed content1_content2_missing_missing-untracked which local deleted
   use (c)hanged version or leave (d)eleted? c
   merging content1_content2_content1_content4-tracked
-  warning: conflicts during merge.
-  merging content1_content2_content1_content4-tracked incomplete! (edit conflicts, then use 'hg resolve --mark')
   merging content1_content2_content2_content1-tracked
   merging content1_content2_content2_content4-tracked
-  warning: conflicts during merge.
-  merging content1_content2_content2_content4-tracked incomplete! (edit conflicts, then use 'hg resolve --mark')
   merging content1_content2_content3_content1-tracked
   merging content1_content2_content3_content3-tracked
-  warning: conflicts during merge.
-  merging content1_content2_content3_content3-tracked incomplete! (edit conflicts, then use 'hg resolve --mark')
   merging content1_content2_content3_content4-tracked
-  warning: conflicts during merge.
-  merging content1_content2_content3_content4-tracked incomplete! (edit conflicts, then use 'hg resolve --mark')
   merging content1_content2_missing_content1-tracked
   merging content1_content2_missing_content4-tracked
-  warning: conflicts during merge.
-  merging content1_content2_missing_content4-tracked incomplete! (edit conflicts, then use 'hg resolve --mark')
   merging missing_content2_content2_content4-tracked
-  warning: conflicts during merge.
-  merging missing_content2_content2_content4-tracked incomplete! (edit conflicts, then use 'hg resolve --mark')
   merging missing_content2_content3_content3-tracked
-  warning: conflicts during merge.
-  merging missing_content2_content3_content3-tracked incomplete! (edit conflicts, then use 'hg resolve --mark')
   merging missing_content2_content3_content4-tracked
-  warning: conflicts during merge.
-  merging missing_content2_content3_content4-tracked incomplete! (edit conflicts, then use 'hg resolve --mark')
   merging missing_content2_missing_content4-tracked
-  warning: conflicts during merge.
-  merging missing_content2_missing_content4-tracked incomplete! (edit conflicts, then use 'hg resolve --mark')
   merging missing_content2_missing_content4-untracked
-  warning: conflicts during merge.
-  merging missing_content2_missing_content4-untracked incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging content1_content2_content1_content4-tracked! (edit, then use 'hg resolve --mark')
+  warning: conflicts while merging content1_content2_content2_content4-tracked! (edit, then use 'hg resolve --mark')
+  warning: conflicts while merging content1_content2_content3_content3-tracked! (edit, then use 'hg resolve --mark')
+  warning: conflicts while merging content1_content2_content3_content4-tracked! (edit, then use 'hg resolve --mark')
+  warning: conflicts while merging content1_content2_missing_content4-tracked! (edit, then use 'hg resolve --mark')
+  warning: conflicts while merging missing_content2_content2_content4-tracked! (edit, then use 'hg resolve --mark')
+  warning: conflicts while merging missing_content2_content3_content3-tracked! (edit, then use 'hg resolve --mark')
+  warning: conflicts while merging missing_content2_content3_content4-tracked! (edit, then use 'hg resolve --mark')
+  warning: conflicts while merging missing_content2_missing_content4-tracked! (edit, then use 'hg resolve --mark')
+  warning: conflicts while merging missing_content2_missing_content4-untracked! (edit, then use 'hg resolve --mark')
   39 files updated, 3 files merged, 8 files removed, 10 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
--- a/tests/test-merge-internal-tools-pattern.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-merge-internal-tools-pattern.t	Tue Oct 20 15:59:10 2015 -0500
@@ -1,5 +1,6 @@
-Make sure that the internal merge tools (internal:fail, internal:local, and
-internal:other) are used when matched by a merge-pattern in hgrc
+Make sure that the internal merge tools (internal:fail, internal:local,
+internal:union and internal:other) are used when matched by a
+merge-pattern in hgrc
 
 Make sure HGMERGE doesn't interfere with the test:
 
@@ -110,3 +111,31 @@
   $ hg stat
   M f
 
+Merge using internal:union tool:
+
+  $ hg update -C 2
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+  $ echo "line 4a" >>f
+  $ hg ci -Am "Adding fourth line (commit 4)"
+  $ hg update 2
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+  $ echo "line 4b" >>f
+  $ hg ci -Am "Adding fourth line v2 (commit 5)"
+  created new head
+
+  $ echo "[merge-patterns]" > .hg/hgrc
+  $ echo "* = internal:union" >> .hg/hgrc
+
+  $ hg merge 3
+  merging f
+  0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+
+  $ cat f
+  line 1
+  line 2
+  third line
+  line 4b
+  line 4a
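The expected contents of f show what :union does with the conflicting region:
both sides are kept, local before other, with no conflict markers. A toy,
stand-alone illustration of that behavior (not Mercurial's implementation):

  def union_conflict(local_hunk, other_hunk):
      # keep the local side of a conflicting region, then the other side
      return local_hunk + other_hunk

  # matches the test's expectation for the conflicting final lines
  assert union_conflict(['line 4b'], ['line 4a']) == ['line 4b', 'line 4a']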
--- a/tests/test-merge-local.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-merge-local.t	Tue Oct 20 15:59:10 2015 -0500
@@ -62,8 +62,7 @@
   $ hg co 0
   merging zzz1_merge_ok
   merging zzz2_merge_bad
-  warning: conflicts during merge.
-  merging zzz2_merge_bad incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging zzz2_merge_bad! (edit, then use 'hg resolve --mark')
   2 files updated, 1 files merged, 3 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges
   [1]
@@ -87,8 +86,7 @@
   $ hg co
   merging zzz1_merge_ok
   merging zzz2_merge_bad
-  warning: conflicts during merge.
-  merging zzz2_merge_bad incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging zzz2_merge_bad! (edit, then use 'hg resolve --mark')
   3 files updated, 1 files merged, 2 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges
   [1]
@@ -96,8 +94,7 @@
   $ hg co 0
   merging zzz1_merge_ok
   merging zzz2_merge_bad
-  warning: conflicts during merge.
-  merging zzz2_merge_bad incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging zzz2_merge_bad! (edit, then use 'hg resolve --mark')
   2 files updated, 1 files merged, 3 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges
   [1]
--- a/tests/test-merge-revert2.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-merge-revert2.t	Tue Oct 20 15:59:10 2015 -0500
@@ -44,8 +44,7 @@
 
   $ hg update
   merging file1
-  warning: conflicts during merge.
-  merging file1 incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges
   [1]
--- a/tests/test-merge-tools.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-merge-tools.t	Tue Oct 20 15:59:10 2015 -0500
@@ -65,10 +65,9 @@
 override $PATH to ensure hgmerge not visible; use $PYTHON in case we're
 running from a devel copy, not a temp installation
 
-  $ PATH="$BINDIR" $PYTHON "$BINDIR"/hg merge -r 2
+  $ PATH="$BINDIR:/usr/sbin" $PYTHON "$BINDIR"/hg merge -r 2
   merging f
-  warning: conflicts during merge.
-  merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -111,10 +110,9 @@
 
   $ echo "echo fail" > false
   $ hg up -qC 1
-  $ PATH="`pwd`:$BINDIR" $PYTHON "$BINDIR"/hg merge -r 2
+  $ PATH="`pwd`:$BINDIR:/usr/sbin" $PYTHON "$BINDIR"/hg merge -r 2
   merging f
-  warning: conflicts during merge.
-  merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -126,10 +124,9 @@
 
   $ mkdir false
   $ hg up -qC 1
-  $ PATH="`pwd`:$BINDIR" $PYTHON "$BINDIR"/hg merge -r 2
+  $ PATH="`pwd`:$BINDIR:/usr/sbin" $PYTHON "$BINDIR"/hg merge -r 2
   merging f
-  warning: conflicts during merge.
-  merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -196,6 +193,26 @@
   M f
   ? f.orig
 
+or true set to disabled:
+  $ beforemerge
+  [merge-tools]
+  false.whatever=
+  true.priority=1
+  # hg update -C 1
+  $ hg merge -r 2 --config merge-tools.true.disabled=yes
+  merging f
+  merging f failed!
+  0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+  [1]
+  $ aftermerge
+  # cat f
+  revision 1
+  space
+  # hg stat
+  M f
+  ? f.orig
+
 or true.executable not found in PATH:
 
   $ beforemerge
@@ -351,6 +368,7 @@
   $ hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=nonexistentmergetool
   couldn't find merge tool true specified for f
   merging f
+  couldn't find merge tool true specified for f
   merging f failed!
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
@@ -374,6 +392,7 @@
   $ hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=/nonexistent/mergetool
   couldn't find merge tool true specified for f
   merging f
+  couldn't find merge tool true specified for f
   merging f failed!
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
@@ -1022,7 +1041,7 @@
   $ hg merge -r 2 --tool internal:merge
   merging f
   warning: internal :merge cannot merge symlinks for f
-  merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
--- a/tests/test-merge-types.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-merge-types.t	Tue Oct 20 15:59:10 2015 -0500
@@ -35,12 +35,12 @@
    branchmerge: True, force: False, partial: False
    ancestor: c334dc3be0da, local: 521a1e40188f+, remote: 3574f3e69b1c
    preserving a for resolve of a
-   a: versions differ -> m
-  picked tool 'internal:merge' for a (binary False symlink True)
+   a: versions differ -> m (premerge)
+  picked tool ':merge' for a (binary False symlink True)
   merging a
   my a@521a1e40188f+ other a@3574f3e69b1c ancestor a@c334dc3be0da
   warning: internal :merge cannot merge symlinks for a
-  merging a incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -62,18 +62,41 @@
   $ hg update -C 1
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
-  $ hg merge --debug
+  $ hg merge --debug --tool :union
     searching for copies back to rev 1
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: c334dc3be0da, local: 3574f3e69b1c+, remote: 521a1e40188f
    preserving a for resolve of a
-   a: versions differ -> m
-  picked tool 'internal:merge' for a (binary False symlink True)
+   a: versions differ -> m (premerge)
+  picked tool ':union' for a (binary False symlink True)
   merging a
   my a@3574f3e69b1c+ other a@521a1e40188f ancestor a@c334dc3be0da
-  warning: internal :merge cannot merge symlinks for a
-  merging a incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: internal :union cannot merge symlinks for a
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+  0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+  [1]
+
+  $ tellmeabout a
+  a is an executable file with content:
+  a
+
+  $ hg update -C 1
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+  $ hg merge --debug --tool :merge3
+    searching for copies back to rev 1
+  resolving manifests
+   branchmerge: True, force: False, partial: False
+   ancestor: c334dc3be0da, local: 3574f3e69b1c+, remote: 521a1e40188f
+   preserving a for resolve of a
+   a: versions differ -> m (premerge)
+  picked tool ':merge3' for a (binary False symlink True)
+  merging a
+  my a@3574f3e69b1c+ other a@521a1e40188f ancestor a@c334dc3be0da
+  warning: internal :merge3 cannot merge symlinks for a
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -101,7 +124,7 @@
    branchmerge: False, force: False, partial: False
    ancestor: c334dc3be0da, local: c334dc3be0da+, remote: 521a1e40188f
    preserving a for resolve of a
-   a: versions differ -> m
+   a: versions differ -> m (premerge)
   (couldn't find merge tool hgmerge|tool hgmerge can't handle symlinks) (re)
   picked tool ':prompt' for a (binary False symlink True)
    no tool found to merge a
@@ -136,7 +159,7 @@
   $ hg merge
   merging f
   warning: internal :merge cannot merge symlinks for f
-  merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -148,7 +171,7 @@
   $ hg merge
   merging f
   warning: internal :merge cannot merge symlinks for f
-  merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -175,7 +198,7 @@
   $ hg merge
   merging f
   warning: internal :merge cannot merge symlinks for f
-  merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -187,7 +210,7 @@
   $ hg merge
   merging f
   warning: internal :merge cannot merge symlinks for f
-  merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -260,22 +283,20 @@
 
   $ hg merge
   merging a
-  warning: conflicts during merge.
-  merging a incomplete! (edit conflicts, then use 'hg resolve --mark')
   warning: cannot merge flags for b
   merging b
-  warning: conflicts during merge.
-  merging b incomplete! (edit conflicts, then use 'hg resolve --mark')
   warning: cannot merge flags for c
   merging d
   warning: internal :merge cannot merge symlinks for d
-  merging d incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging d! (edit, then use 'hg resolve --mark')
   merging f
   warning: internal :merge cannot merge symlinks for f
-  merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
   merging h
   warning: internal :merge cannot merge symlinks for h
-  merging h incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging h! (edit, then use 'hg resolve --mark')
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+  warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
   3 files updated, 0 files merged, 0 files removed, 5 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -321,22 +342,20 @@
   $ hg up -Cqr1
   $ hg merge
   merging a
-  warning: conflicts during merge.
-  merging a incomplete! (edit conflicts, then use 'hg resolve --mark')
   warning: cannot merge flags for b
   merging b
-  warning: conflicts during merge.
-  merging b incomplete! (edit conflicts, then use 'hg resolve --mark')
   warning: cannot merge flags for c
   merging d
   warning: internal :merge cannot merge symlinks for d
-  merging d incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging d! (edit, then use 'hg resolve --mark')
   merging f
   warning: internal :merge cannot merge symlinks for f
-  merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
   merging h
   warning: internal :merge cannot merge symlinks for h
-  merging h incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging h! (edit, then use 'hg resolve --mark')
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+  warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
   3 files updated, 0 files merged, 0 files removed, 5 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
--- a/tests/test-merge1.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-merge1.t	Tue Oct 20 15:59:10 2015 -0500
@@ -214,10 +214,10 @@
   $ cat > $TESTTMP/abort.py <<EOF
   > # emulate aborting before "recordupdates()". in this case, files
   > # are changed without updating dirstate
-  > from mercurial import extensions, merge, util
+  > from mercurial import extensions, merge, error
   > def applyupdates(orig, *args, **kwargs):
   >     orig(*args, **kwargs)
-  >     raise util.Abort('intentional aborting')
+  >     raise error.Abort('intentional aborting')
   > def extsetup(ui):
   >     extensions.wrapfunction(merge, "applyupdates", applyupdates)
   > EOF
--- a/tests/test-merge7.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-merge7.t	Tue Oct 20 15:59:10 2015 -0500
@@ -44,8 +44,7 @@
   (run 'hg heads' to see heads, 'hg merge' to merge)
   $ hg merge
   merging test.txt
-  warning: conflicts during merge.
-  merging test.txt incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging test.txt! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -85,12 +84,14 @@
    branchmerge: True, force: False, partial: False
    ancestor: 96b70246a118, local: 50c3a7e29886+, remote: 40d11a4173a8
    preserving test.txt for resolve of test.txt
-   test.txt: versions differ -> m
-  picked tool 'internal:merge' for test.txt (binary False symlink False)
+   test.txt: versions differ -> m (premerge)
+  picked tool ':merge' for test.txt (binary False symlink False)
   merging test.txt
   my test.txt@50c3a7e29886+ other test.txt@40d11a4173a8 ancestor test.txt@96b70246a118
-  warning: conflicts during merge.
-  merging test.txt incomplete! (edit conflicts, then use 'hg resolve --mark')
+   test.txt: versions differ -> m (merge)
+  picked tool ':merge' for test.txt (binary False symlink False)
+  my test.txt@50c3a7e29886+ other test.txt@40d11a4173a8 ancestor test.txt@96b70246a118
+  warning: conflicts while merging test.txt! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
--- a/tests/test-merge9.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-merge9.t	Tue Oct 20 15:59:10 2015 -0500
@@ -27,8 +27,8 @@
 test with the rename on the remote side
   $ HGMERGE=false hg merge
   merging bar
+  merging foo and baz to baz
   merging bar failed!
-  merging foo and baz to baz
   1 files updated, 1 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -41,8 +41,8 @@
   3 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ HGMERGE=false hg merge
   merging bar
+  merging baz and foo to baz
   merging bar failed!
-  merging baz and foo to baz
   1 files updated, 1 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -77,14 +77,13 @@
 resolve all warning
   $ hg resolve
   abort: no files or directories specified
-  (use --all to remerge all files)
+  (use --all to re-merge all unresolved files)
   [255]
 
 resolve all
   $ hg resolve -a
   merging bar
-  warning: conflicts during merge.
-  merging bar incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging bar! (edit, then use 'hg resolve --mark')
   [1]
 
 after
--- a/tests/test-module-imports.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-module-imports.t	Tue Oct 20 15:59:10 2015 -0500
@@ -112,22 +112,5 @@
 these may expose other cycles.
 
   $ hg locate 'mercurial/**.py' 'hgext/**.py' | sed 's-\\-/-g' | python "$import_checker" -
-  mercurial/dispatch.py mixed imports
-     stdlib:    commands
-     relative:  error, extensions, fancyopts, hg, hook, util
-  mercurial/fileset.py mixed imports
-     stdlib:    parser
-     relative:  error, merge, util
-  mercurial/revset.py mixed imports
-     stdlib:    parser
-     relative:  error, hbisect, phases, util
-  mercurial/templater.py mixed imports
-     stdlib:    parser
-     relative:  config, error, templatefilters, templatekw, util
-  mercurial/ui.py mixed imports
-     stdlib:    formatter
-     relative:  config, error, progress, scmutil, util
-  Import cycle: mercurial.cmdutil -> mercurial.context -> mercurial.subrepo -> mercurial.cmdutil
   Import cycle: hgext.largefiles.basestore -> hgext.largefiles.localstore -> hgext.largefiles.basestore
-  Import cycle: mercurial.commands -> mercurial.commandserver -> mercurial.dispatch -> mercurial.commands
   [1]
--- a/tests/test-mq-eol.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-mq-eol.t	Tue Oct 20 15:59:10 2015 -0500
@@ -61,7 +61,7 @@
   1 out of 1 hunks FAILED -- saving rejects to file a.rej
   patch failed, unable to continue (try -v)
   patch failed, rejects left in working directory
-  errors during apply, please fix and refresh eol.diff
+  errors during apply, please fix and qrefresh eol.diff
   [2]
   $ hg qpop
   popping eol.diff
@@ -73,7 +73,7 @@
   applying eol.diff
   patch failed, unable to continue (try -v)
   patch failed, rejects left in working directory
-  errors during apply, please fix and refresh eol.diff
+  errors during apply, please fix and qrefresh eol.diff
   [2]
   $ hg qpop
   popping eol.diff
@@ -170,7 +170,7 @@
   1 out of 1 hunks FAILED -- saving rejects to file a.rej
   patch failed, unable to continue (try -v)
   patch failed, rejects left in working directory
-  errors during apply, please fix and refresh patch1
+  errors during apply, please fix and qrefresh patch1
   [2]
   $ hg qpop
   popping patch1
@@ -193,7 +193,7 @@
   1 out of 1 hunks FAILED -- saving rejects to file a.rej
   patch failed, unable to continue (try -v)
   patch failed, rejects left in working directory
-  errors during apply, please fix and refresh patch1
+  errors during apply, please fix and qrefresh patch1
   [2]
   $ hg qpop
   popping patch1
--- a/tests/test-mq-missingfiles.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-mq-missingfiles.t	Tue Oct 20 15:59:10 2015 -0500
@@ -45,7 +45,7 @@
   2 out of 2 hunks FAILED -- saving rejects to file b.rej
   patch failed, unable to continue (try -v)
   patch failed, rejects left in working directory
-  errors during apply, please fix and refresh changeb
+  errors during apply, please fix and qrefresh changeb
   [2]
 
 Display added files:
@@ -98,7 +98,7 @@
   b not tracked!
   patch failed, unable to continue (try -v)
   patch failed, rejects left in working directory
-  errors during apply, please fix and refresh changebb
+  errors during apply, please fix and qrefresh changebb
   [2]
   $ cat a
   a
@@ -150,7 +150,7 @@
   1 out of 1 hunks FAILED -- saving rejects to file b.rej
   patch failed, unable to continue (try -v)
   patch failed, rejects left in working directory
-  errors during apply, please fix and refresh changeb
+  errors during apply, please fix and qrefresh changeb
   [2]
   $ hg st
   ? b.rej
--- a/tests/test-mq-qdelete.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-mq-qdelete.t	Tue Oct 20 15:59:10 2015 -0500
@@ -155,11 +155,11 @@
   $ hg init --mq
   $ hg qimport -r 3
   $ hg qpop
-  popping 3.diff
+  popping imported_patch_pc
   patch queue now empty
-  $ hg qdel -k 3.diff
-  $ hg qimp -e 3.diff
-  adding 3.diff to series file
+  $ hg qdel -k imported_patch_pc
+  $ hg qimp -e imported_patch_pc
+  adding imported_patch_pc to series file
   $ hg qfinish -a
   no patches applied
 
@@ -167,17 +167,17 @@
 resilience to inconsistency: qfinish -a with applied patches not in series
 
   $ hg qser
-  3.diff
+  imported_patch_pc
   $ hg qapplied
   $ hg qpush
-  applying 3.diff
-  patch 3.diff is empty
-  now at: 3.diff
+  applying imported_patch_pc
+  patch imported_patch_pc is empty
+  now at: imported_patch_pc
   $ echo next >>  base
   $ hg qrefresh -d '1 0'
   $ echo > .hg/patches/series # remove 3.diff from series to confuse mq
   $ hg qfinish -a
-  revision 47dfa8501675 refers to unknown patches: 3.diff
+  revision 47dfa8501675 refers to unknown patches: imported_patch_pc
 
 more complex state 'both known and unknown patches
 
--- a/tests/test-mq-qfold.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-mq-qfold.t	Tue Oct 20 15:59:10 2015 -0500
@@ -59,7 +59,7 @@
 
   $ echo d >> a
   $ hg qfold p3
-  abort: local changes found, refresh first
+  abort: local changes found, qrefresh first
   [255]
 
   $ hg diff -c .
@@ -149,11 +149,11 @@
   $ hg qrefresh -m "original message"
 
   $ cat > $TESTTMP/commitfailure.py <<EOF
-  > from mercurial import util
+  > from mercurial import error
   > def reposetup(ui, repo):
   >     class commitfailure(repo.__class__):
   >         def commit(self, *args, **kwargs):
-  >             raise util.Abort('emulating unexpected abort')
+  >             raise error.Abort('emulating unexpected abort')
   >     repo.__class__ = commitfailure
   > EOF
 
@@ -181,7 +181,7 @@
 
   $ rm -f .hg/last-message.txt
   $ HGEDITOR="sh $TESTTMP/editor.sh" hg qfold -e -m MESSAGE p3
-  refresh interrupted while patch was popped! (revert --all, qpush to recover)
+  qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
   abort: emulating unexpected abort
   [255]
   $ test -f .hg/last-message.txt
@@ -232,7 +232,7 @@
   transaction abort!
   rollback completed
   note: commit message saved in .hg/last-message.txt
-  refresh interrupted while patch was popped! (revert --all, qpush to recover)
+  qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
   abort: pretxncommit.unexpectedabort hook exited with status 1
   [255]
   $ cat .hg/last-message.txt
--- a/tests/test-mq-qgoto.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-mq-qgoto.t	Tue Oct 20 15:59:10 2015 -0500
@@ -63,7 +63,7 @@
   $ echo f >> c
 
   $ hg qgoto 1
-  abort: local changes found, refresh first
+  abort: local changes found, qrefresh first
   [255]
   $ hg qgoto 1 -f
   popping bug141421
--- a/tests/test-mq-qimport.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-mq-qimport.t	Tue Oct 20 15:59:10 2015 -0500
@@ -198,32 +198,32 @@
   now at: appendbar.diff
   $ hg qfin -a
   patch b.diff finalized without changeset message
-  $ touch .hg/patches/2.diff
+  $ touch .hg/patches/append_foo
   $ hg qimport -r 'p1(.)::'
-  abort: patch "2.diff" already exists
+  abort: patch "append_foo" already exists
   [255]
   $ hg qapplied
-  3.diff
+  append_bar
   $ hg qfin -a
-  $ rm .hg/patches/2.diff
+  $ rm .hg/patches/append_foo
   $ hg qimport -r 'p1(.)::' -P
   $ hg qpop -a
-  popping 3.diff
-  popping 2.diff
+  popping append_bar
+  popping append_foo
   patch queue now empty
-  $ hg qdel 3.diff
-  $ hg qdel -k 2.diff
+  $ hg qdel append_foo
+  $ hg qdel -k append_bar
 
 qimport -e
 
-  $ hg qimport -e 2.diff
-  adding 2.diff to series file
-  $ hg qdel -k 2.diff
+  $ hg qimport -e append_bar
+  adding append_bar to series file
+  $ hg qdel -k append_bar
 
 qimport -e --name newname oldexisitingpatch
 
-  $ hg qimport -e --name this-name-is-better 2.diff
-  renaming 2.diff to this-name-is-better
+  $ hg qimport -e --name this-name-is-better append_bar
+  renaming append_bar to this-name-is-better
   adding this-name-is-better to series file
   $ hg qser
   this-name-is-better
--- a/tests/test-mq-qnew.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-mq-qnew.t	Tue Oct 20 15:59:10 2015 -0500
@@ -156,8 +156,7 @@
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   created new head
   merging a
-  warning: conflicts during merge.
-  merging a incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   (no more unresolved files)
@@ -234,8 +233,7 @@
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   created new head
   merging a
-  warning: conflicts during merge.
-  merging a incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   (no more unresolved files)
@@ -248,11 +246,11 @@
   $ cd repo
 
   $ cat > $TESTTMP/commitfailure.py <<EOF
-  > from mercurial import util
+  > from mercurial import error
   > def reposetup(ui, repo):
   >     class commitfailure(repo.__class__):
   >         def commit(self, *args, **kwargs):
-  >             raise util.Abort('emulating unexpected abort')
+  >             raise error.Abort('emulating unexpected abort')
   >     repo.__class__ = commitfailure
   > EOF
   $ cat >> .hg/hgrc <<EOF
--- a/tests/test-mq-qpush-exact.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-mq-qpush-exact.t	Tue Oct 20 15:59:10 2015 -0500
@@ -204,7 +204,7 @@
   1 out of 1 hunks FAILED -- saving rejects to file fp0.rej
   patch failed, unable to continue (try -v)
   patch failed, rejects left in working directory
-  errors during apply, please fix and refresh p0
+  errors during apply, please fix and qrefresh p0
   [2]
   $ cat fp0
   cp0-bad
@@ -231,7 +231,7 @@
   1 out of 1 hunks FAILED -- saving rejects to file fp1.rej
   patch failed, unable to continue (try -v)
   patch failed, rejects left in working directory
-  errors during apply, please fix and refresh p1
+  errors during apply, please fix and qrefresh p1
   [2]
   $ cat fp1
   cp1-bad
--- a/tests/test-mq-qpush-fail.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-mq-qpush-fail.t	Tue Oct 20 15:59:10 2015 -0500
@@ -56,7 +56,9 @@
   applying bad-patch
   transaction abort!
   rollback completed
-  cleaning up working directory...done
+  cleaning up working directory...
+  reverting foo
+  done
   abort: decoding near '\xe9': 'ascii' codec can't decode byte 0xe9 in position 0: ordinal not in range(128)! (esc)
   [255]
   $ hg parents
@@ -231,22 +233,22 @@
   [255]
   $ echo a >> a
   $ hg qpop --keep-changes
-  abort: local changes found, refresh first
+  abort: local changes found, qrefresh first
   [255]
   $ hg revert -qa a
   $ rm a
   $ hg qpop --keep-changes
-  abort: local changes found, refresh first
+  abort: local changes found, qrefresh first
   [255]
   $ hg rm -A a
   $ hg qpop --keep-changes
-  abort: local changes found, refresh first
+  abort: local changes found, qrefresh first
   [255]
   $ hg revert -qa a
   $ echo b > b
   $ hg add b
   $ hg qpop --keep-changes
-  abort: local changes found, refresh first
+  abort: local changes found, qrefresh first
   [255]
   $ hg forget b
   $ echo d > d
@@ -301,7 +303,7 @@
   committing manifest
   committing changelog
   patch failed, rejects left in working directory
-  errors during apply, please fix and refresh p3
+  errors during apply, please fix and qrefresh p3
   [2]
   $ cat a.orig
   a
@@ -447,7 +449,7 @@
   now at: p2
   $ hg st b
   $ hg --config mq.keepchanges=1 qpush --exact
-  abort: local changes found, refresh first
+  abort: local changes found, qrefresh first
   [255]
   $ hg revert -qa a
   $ hg qpop
--- a/tests/test-mq-qrefresh-replace-log-message.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-mq-qrefresh-replace-log-message.t	Tue Oct 20 15:59:10 2015 -0500
@@ -2,6 +2,13 @@
 
   $ echo "[extensions]" >> $HGRCPATH
   $ echo "mq=" >> $HGRCPATH
+  $ cat >> $HGRCPATH <<EOF
+  > [defaults]
+  > # explicit date to commit with fixed hashid
+  > qnew = -d "0 0"
+  > qrefresh = -d "0 0"
+  > qfold = -d "0 0"
+  > EOF
   $ hg init
   $ hg qinit
 
@@ -108,11 +115,11 @@
   > EOF
 
   $ cat > $TESTTMP/commitfailure.py <<EOF
-  > from mercurial import util
+  > from mercurial import error
   > def reposetup(ui, repo):
   >     class commitfailure(repo.__class__):
   >         def commit(self, *args, **kwargs):
-  >             raise util.Abort('emulating unexpected abort')
+  >             raise error.Abort('emulating unexpected abort')
   >     repo.__class__ = commitfailure
   > EOF
 
@@ -132,7 +139,7 @@
 
   $ rm -f .hg/last-message.txt
   $ HGEDITOR="sh $TESTTMP/editor.sh" hg qrefresh -e
-  refresh interrupted while patch was popped! (revert --all, qpush to recover)
+  qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
   abort: emulating unexpected abort
   [255]
   $ test -f .hg/last-message.txt
@@ -181,7 +188,7 @@
   transaction abort!
   rollback completed
   note: commit message saved in .hg/last-message.txt
-  refresh interrupted while patch was popped! (revert --all, qpush to recover)
+  qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
   abort: pretxncommit.unexpectedabort hook exited with status 1
   [255]
   $ cat .hg/last-message.txt
@@ -191,3 +198,129 @@
   
   
   test saving last-message.txt
+
+Test visibility of in-memory dirstate changes outside transaction to
+external process
+
+  $ cat > $TESTTMP/checkvisibility.sh <<EOF
+  > echo "===="
+  > hg parents --template "{rev}:{node|short}\n"
+  > hg status -arm
+  > echo "===="
+  > EOF
+
+== test visibility to external editor
+
+  $ hg update -C -q first-patch
+  $ rm -f file2
+  $ hg qpush -q second-patch --config hooks.pretxncommit.unexpectedabort=
+  now at: second-patch
+  $ echo bbbb >> file2
+
+  $ sh "$TESTTMP/checkvisibility.sh"
+  ====
+  1:e30108269082
+  M file2
+  ====
+
+  $ HGEDITOR='sh "$TESTTMP/checkvisibility.sh"' hg qrefresh -e
+  ====
+  0:25e397dabed2
+  A file2
+  ====
+  transaction abort!
+  rollback completed
+  note: commit message saved in .hg/last-message.txt
+  qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
+  abort: pretxncommit.unexpectedabort hook exited with status 1
+  [255]
+
+(rebuilding after a failed qrefresh is based on rev #0, which causes
+the status of "file2" to be dropped)
+
+  $ sh "$TESTTMP/checkvisibility.sh"
+  ====
+  0:25e397dabed2
+  ====
+
+== test visibility to precommit external hook
+
+  $ hg update -C -q
+  $ rm -f file2
+  $ hg qpush -q second-patch --config hooks.pretxncommit.unexpectedabort=
+  now at: second-patch
+  $ echo bbbb >> file2
+
+  $ cat >> .hg/hgrc <<EOF
+  > [hooks]
+  > precommit.checkvisibility = sh "$TESTTMP/checkvisibility.sh"
+  > EOF
+
+  $ sh "$TESTTMP/checkvisibility.sh"
+  ====
+  1:e30108269082
+  M file2
+  ====
+
+  $ hg qrefresh
+  ====
+  0:25e397dabed2
+  A file2
+  ====
+  transaction abort!
+  rollback completed
+  qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
+  abort: pretxncommit.unexpectedabort hook exited with status 1
+  [255]
+
+  $ sh "$TESTTMP/checkvisibility.sh"
+  ====
+  0:25e397dabed2
+  ====
+
+  $ cat >> .hg/hgrc <<EOF
+  > [hooks]
+  > precommit.checkvisibility =
+  > EOF
+
+== test visibility to pretxncommit external hook
+
+  $ hg update -C -q
+  $ rm -f file2
+  $ hg qpush -q second-patch --config hooks.pretxncommit.unexpectedabort=
+  now at: second-patch
+  $ echo bbbb >> file2
+
+  $ cat >> .hg/hgrc <<EOF
+  > [hooks]
+  > pretxncommit.checkvisibility = sh "$TESTTMP/checkvisibility.sh"
+  > # make checkvisibility run before unexpectedabort
+  > priority.pretxncommit.checkvisibility = 10
+  > EOF
+
+  $ sh "$TESTTMP/checkvisibility.sh"
+  ====
+  1:e30108269082
+  M file2
+  ====
+
+  $ hg qrefresh
+  ====
+  0:25e397dabed2
+  A file2
+  ====
+  transaction abort!
+  rollback completed
+  qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
+  abort: pretxncommit.unexpectedabort hook exited with status 1
+  [255]
+
+  $ sh "$TESTTMP/checkvisibility.sh"
+  ====
+  0:25e397dabed2
+  ====
+
+  $ cat >> .hg/hgrc <<EOF
+  > [hooks]
+  > pretxncommit.checkvisibility =
+  > EOF
--- a/tests/test-mq-qrefresh.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-mq-qrefresh.t	Tue Oct 20 15:59:10 2015 -0500
@@ -503,7 +503,7 @@
   > bar'
   transaction abort!
   rollback completed
-  refresh interrupted while patch was popped! (revert --all, qpush to recover)
+  qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
   abort: username 'foo\nbar' contains a newline!
   [255]
   $ rm a
@@ -519,7 +519,7 @@
   $ hg qrefresh -u ' '
   transaction abort!
   rollback completed
-  refresh interrupted while patch was popped! (revert --all, qpush to recover)
+  qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
   abort: empty username!
   [255]
   $ cat .hg/patches/a
--- a/tests/test-mq-safety.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-mq-safety.t	Tue Oct 20 15:59:10 2015 -0500
@@ -25,7 +25,7 @@
   $ hg phase --public qbase
   $ echo babar >> foo
   $ hg qref
-  abort: cannot refresh public revision
+  abort: cannot qrefresh public revision
   (see "hg help phases" for details)
   [255]
   $ hg revert -a
@@ -35,7 +35,7 @@
   (see "hg help phases" for details)
   [255]
   $ hg qfold bar
-  abort: cannot refresh public revision
+  abort: cannot qrefresh public revision
   (see "hg help phases" for details)
   [255]
   $ hg revert -a
@@ -81,7 +81,7 @@
   abort: popping would remove a revision not managed by this patch queue
   [255]
   $ hg qrefresh
-  abort: cannot refresh a revision with children
+  abort: cannot qrefresh a revision with children
   [255]
   $ hg tip --template '{rev} {desc}\n'
   3 append quux
--- a/tests/test-mq-subrepo-svn.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-mq-subrepo-svn.t	Tue Oct 20 15:59:10 2015 -0500
@@ -27,10 +27,7 @@
 #endif
 
   $ mkdir -p svn-project-2499/trunk
-  $ svn import -m 'init project' svn-project-2499 "$SVNREPOURL"
-  Adding         svn-project-2499/trunk (glob)
-  
-  Committed revision 1.
+  $ svn import -qm 'init project' svn-project-2499 "$SVNREPOURL"
 
 qnew on repo w/svn subrepo
   $ mkrepo repo-2499-svn-subrepo
--- a/tests/test-mq-subrepo.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-mq-subrepo.t	Tue Oct 20 15:59:10 2015 -0500
@@ -243,14 +243,14 @@
   $ hg -R sub update 0000
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ hg qpop
-  abort: local changed subrepos found, refresh first
+  abort: local changed subrepos found, qrefresh first
   [255]
   $ hg revert sub
   reverting subrepo sub
   adding sub/a (glob)
   $ hg qpop
-  popping 1.diff
-  now at: 0.diff
+  popping 1
+  now at: 0
   $ hg status -AS
   C .hgsub
   C .hgsubstate
@@ -262,17 +262,17 @@
   $ hg -R sub update 0000
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ hg qpush
-  abort: local changed subrepos found, refresh first
+  abort: local changed subrepos found, qrefresh first
   [255]
   $ hg revert sub
   reverting subrepo sub
   adding sub/a (glob)
   $ hg qpush
-  applying 1.diff
+  applying 1
    subrepository sub diverged (local revision: b2fdb12cd82b, remote revision: aa037b301eba)
   (M)erge, keep (l)ocal or keep (r)emote? m
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  now at: 1.diff
+  now at: 1
   $ hg status -AS
   C .hgsub
   C .hgsubstate
--- a/tests/test-mq.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-mq.t	Tue Oct 20 15:59:10 2015 -0500
@@ -39,7 +39,7 @@
   
   By default, mq will automatically use git patches when required to avoid
   losing file mode changes, copy records, binary files or empty files creations
-  or deletions. This behaviour can be configured with:
+  or deletions. This behavior can be configured with:
   
     [mq]
     git = auto/keep/yes/no
@@ -869,7 +869,7 @@
   1 out of 1 hunks FAILED -- saving rejects to file foo.rej
   patch failed, unable to continue (try -v)
   patch failed, rejects left in working directory
-  errors during apply, please fix and refresh bar
+  errors during apply, please fix and qrefresh bar
   [2]
   $ hg st
   ? foo
--- a/tests/test-obsolete-tag-cache.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-obsolete-tag-cache.t	Tue Oct 20 15:59:10 2015 -0500
@@ -68,10 +68,10 @@
   55482a6fb4b1881fa8f746fd52cf6f096bb21c89 test1
 
   $ hg blackbox -l 4
-  1970/01/01 00:00:00 bob> tags
-  1970/01/01 00:00:00 bob> 2/2 cache hits/lookups in * seconds (glob)
-  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 2 tags
-  1970/01/01 00:00:00 bob> tags exited 0 after * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> tags (glob)
+  1970/01/01 00:00:00 bob (*)> 2/2 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 2 tags (glob)
+  1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob)
 
 Hiding another changeset should cause the filtered hash to change
 
@@ -87,10 +87,10 @@
   042eb6bfcc4909bad84a1cbf6eb1ddf0ab587d41 head2
 
   $ hg blackbox -l 4
-  1970/01/01 00:00:00 bob> tags
-  1970/01/01 00:00:00 bob> 1/1 cache hits/lookups in * seconds (glob)
-  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 1 tags
-  1970/01/01 00:00:00 bob> tags exited 0 after * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> tags (glob)
+  1970/01/01 00:00:00 bob (*)> 1/1 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob)
+  1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob)
 
 Resolving tags on an unfiltered repo writes a separate tags cache
 
@@ -107,7 +107,7 @@
   d75775ffbc6bca1794d300f5571272879bd280da test2
 
   $ hg blackbox -l 4
-  1970/01/01 00:00:00 bob> --hidden tags
-  1970/01/01 00:00:00 bob> 2/2 cache hits/lookups in * seconds (glob)
-  1970/01/01 00:00:00 bob> writing .hg/cache/tags2 with 3 tags
-  1970/01/01 00:00:00 bob> --hidden tags exited 0 after * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> --hidden tags (glob)
+  1970/01/01 00:00:00 bob (*)> 2/2 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2 with 3 tags (glob)
+  1970/01/01 00:00:00 bob (*)> --hidden tags exited 0 after * seconds (glob)
--- a/tests/test-parseindex.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-parseindex.t	Tue Oct 20 15:59:10 2015 -0500
@@ -60,9 +60,66 @@
 
   $ cd ..
 
-Test corrupted p1/p2 fields that could cause SEGV at parsers.c:
+#if no-pure
+
+Test SEGV caused by bad revision passed to reachableroots() (issue4775):
+
+  $ cd a
 
-#if no-pure
+  $ python <<EOF
+  > from mercurial import changelog, scmutil
+  > cl = changelog.changelog(scmutil.vfs('.hg/store'))
+  > print 'good heads:'
+  > for head in [0, len(cl) - 1, -1]:
+  >     print'%s: %r' % (head, cl.reachableroots(0, [head], [0]))
+  > print 'bad heads:'
+  > for head in [len(cl), 10000, -2, -10000, None]:
+  >     print '%s:' % head,
+  >     try:
+  >         cl.reachableroots(0, [head], [0])
+  >         print 'uncaught buffer overflow?'
+  >     except (IndexError, TypeError) as inst:
+  >         print inst
+  > print 'good roots:'
+  > for root in [0, len(cl) - 1, -1]:
+  >     print '%s: %r' % (root, cl.reachableroots(root, [len(cl) - 1], [root]))
+  > print 'out-of-range roots are ignored:'
+  > for root in [len(cl), 10000, -2, -10000]:
+  >     print '%s: %r' % (root, cl.reachableroots(root, [len(cl) - 1], [root]))
+  > print 'bad roots:'
+  > for root in [None]:
+  >     print '%s:' % root,
+  >     try:
+  >         cl.reachableroots(root, [len(cl) - 1], [root])
+  >         print 'uncaught error?'
+  >     except TypeError as inst:
+  >         print inst
+  > EOF
+  good heads:
+  0: [0]
+  1: [0]
+  -1: []
+  bad heads:
+  2: head out of range
+  10000: head out of range
+  -2: head out of range
+  -10000: head out of range
+  None: an integer is required
+  good roots:
+  0: [0]
+  1: [1]
+  -1: [-1]
+  out-of-range roots are ignored:
+  2: []
+  10000: []
+  -2: []
+  -10000: []
+  bad roots:
+  None: an integer is required
+
+  $ cd ..
+
+Test corrupted p1/p2 fields that could cause SEGV at parsers.c:
 
   $ mkdir invalidparent
   $ cd invalidparent
@@ -94,6 +151,8 @@
   > cl = changelog.changelog(scmutil.vfs(sys.argv[1]))
   > n0, n1 = cl.node(0), cl.node(1)
   > ops = [
+  >     ('reachableroots',
+  >      lambda: cl.index.reachableroots2(0, [1], [0], False)),
   >     ('compute_phases_map_sets', lambda: cl.computephases([[0], []])),
   >     ('index_headrevs', lambda: cl.headrevs()),
   >     ('find_gca_candidates', lambda: cl.commonancestorsheads(n0, n1)),
@@ -109,11 +168,13 @@
   > EOF
 
   $ python test.py limit/.hg/store
+  reachableroots: parent out of range
   compute_phases_map_sets: parent out of range
   index_headrevs: parent out of range
   find_gca_candidates: parent out of range
   find_deepest: parent out of range
   $ python test.py segv/.hg/store
+  reachableroots: parent out of range
   compute_phases_map_sets: parent out of range
   index_headrevs: parent out of range
   find_gca_candidates: parent out of range
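
The test-parseindex.t hunks above exercise bounds checking in reachableroots(): bad heads must raise IndexError/TypeError rather than read past the index buffer (issue4775), and out-of-range roots are silently ignored. Below is a minimal pure-Python sketch of that kind of bounds-checked DAG walk; the parents mapping and the function itself are illustrative stand-ins, not Mercurial's parsers.c implementation.

  # Minimal sketch of a bounds-checked reachable-roots walk (illustrative
  # only, not Mercurial's C implementation).  `parents` maps each revision
  # to its parent revisions and stands in for the real revlog index.
  def reachableroots(minroot, heads, roots, parents):
      numrevs = len(parents)
      for head in heads:
          if not isinstance(head, int):
              raise TypeError('an integer is required')
          if head < -1 or head >= numrevs:
              raise IndexError('head out of range')
      roots = set(r for r in roots if 0 <= r < numrevs)  # bad roots ignored
      reachable = set()
      stack = [h for h in heads if h != -1]   # -1 is the null revision
      seen = set(stack)
      while stack:
          rev = stack.pop()
          if rev in roots:
              reachable.add(rev)
          for p in parents[rev]:
              if p >= minroot and p not in seen:
                  seen.add(p)
                  stack.append(p)
      return sorted(reachable)

  # tiny linear history: 0 <- 1 <- 2
  parents = {0: [], 1: [0], 2: [1]}
  print(reachableroots(0, [2], [0], parents))   # [0]
  print(reachableroots(0, [-1], [0], parents))  # []
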
--- a/tests/test-patchbomb.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-patchbomb.t	Tue Oct 20 15:59:10 2015 -0500
@@ -356,6 +356,48 @@
   Q70eyNw=
   --===*=-- (glob)
 
+with a specific bundle type
+(binary part must be different)
+
+  $ hg email --date '1970-1-1 0:3' -n -f quux -t foo \
+  >  -c bar -s test -r tip -b --desc description \
+  > --config patchbomb.bundletype=gzip | $FILTERBOUNDARY
+  searching for changes
+  1 changesets found
+  
+  displaying test ...
+  Content-Type: multipart/mixed; boundary="===*==" (glob)
+  MIME-Version: 1.0
+  Subject: test
+  Message-Id: <patchbomb.180@*> (glob)
+  User-Agent: Mercurial-patchbomb/* (glob)
+  Date: Thu, 01 Jan 1970 00:03:00 +0000
+  From: quux
+  To: foo
+  Cc: bar
+  
+  --===*= (glob)
+  Content-Type: text/plain; charset="us-ascii"
+  MIME-Version: 1.0
+  Content-Transfer-Encoding: 7bit
+  
+  a multiline
+  
+  description
+  
+  --===*= (glob)
+  Content-Type: application/x-mercurial-bundle
+  MIME-Version: 1.0
+  Content-Disposition: attachment; filename="bundle.hg"
+  Content-Transfer-Encoding: base64
+  
+  SEcxMEdaeJxjYGBY8V9n/iLGbtFfJZuNk/euDCpWfrRy/vTrevFCx1/4t7J5LdeL0ix0Opx3kwEL
+  wKYXKqUJwqnG5sYWSWmmJsaWlqYWaRaWJpaWiWamZpYWRgZGxolJiabmSQbmZqlcQMV6QGwCxGzG
+  CgZcySARUyA2A2LGZKiZ3Y+Lu786z4z4MWXmsrAZCsqrl1az5y21PMcjpbThzWeXGT+/nutbmvvz
+  zXYS3BoGxdrJDIYmlimJJiZpRokmqYYmaSYWFknmSSkmhqbmliamiZYWxuYmBhbJBgZcUBNZQe5K
+  Epm7xF/LT+RLx/a9juFTomaYO/Rgsx4rwBN+IMCUDLOKAQBrsmti
+  --===============*==-- (glob)
+
 utf-8 patch:
   $ $PYTHON -c 'fp = open("utf", "wb"); fp.write("h\xC3\xB6mma!\n"); fp.close();'
   $ hg commit -A -d '4 0' -m 'utf-8 content'
@@ -2800,3 +2842,37 @@
    d
   +d
   
+Test pull url header
+=================================
+
+basic version
+
+  $ echo 'intro=auto' >> $HGRCPATH
+  $ echo "publicurl=$TESTTMP/t2" >> $HGRCPATH
+  $ hg email --date '1980-1-1 0:1' -n -t foo -s test -r '10' | grep '^#'
+  abort: public url $TESTTMP/t2 is missing 3b6f1ec9dde9
+  (use "hg push $TESTTMP/t2 -r 3b6f1ec9dde9")
+  [1]
+
+remote missing
+
+  $ echo 'publicurl=$TESTTMP/missing' >> $HGRCPATH
+  $ hg email --date '1980-1-1 0:1' -n -t foo -s test -r '10'
+  unable to access public repo: $TESTTMP/missing
+  abort: repository $TESTTMP/missing not found!
+  [255]
+
+node missing at remote
+
+  $ hg clone -r '9' . ../t3
+  adding changesets
+  adding manifests
+  adding file changes
+  added 3 changesets with 3 changes to 3 files
+  updating to branch test
+  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ echo 'publicurl=$TESTTMP/t3' >> $HGRCPATH
+  $ hg email --date '1980-1-1 0:1' -n -t foo -s test -r '10'
+  abort: public url $TESTTMP/t3 is missing 3b6f1ec9dde9
+  (use "hg push $TESTTMP/t3 -r 3b6f1ec9dde9")
+  [255]
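
The new "Test pull url header" block above covers the patchbomb.publicurl option: before mailing, hg email checks that every node it is about to send already exists in the configured public repository and aborts with a push hint otherwise. A rough sketch of that pre-flight check follows; the path and node are examples taken from the test, and the real extension resolves the node through Mercurial's peer API rather than by shelling out to hg.

  # Rough illustration of the patchbomb.publicurl pre-flight check: make sure
  # a node exists in the public repository before mailing patches.  The real
  # extension uses Mercurial's peer API instead of shelling out.
  import subprocess

  def node_in_public_repo(publicurl, node):
      cmd = ['hg', 'log', '-R', publicurl, '-r', node, '-T', '{node}\n']
      try:
          subprocess.check_output(cmd, stderr=subprocess.STDOUT)
      except subprocess.CalledProcessError:
          return False
      return True

  publicurl = '/path/to/public-clone'   # e.g. the $TESTTMP/t2 clone above
  node = '3b6f1ec9dde9'
  if not node_in_public_repo(publicurl, node):
      print('public url %s is missing %s' % (publicurl, node))
      print('(use "hg push %s -r %s")' % (publicurl, node))
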
--- a/tests/test-pathencode.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-pathencode.py	Tue Oct 20 15:59:10 2015 -0500
@@ -2,7 +2,7 @@
 # time it is invoked, and tests the encoding of those pathnames.
 #
 # It uses a simple probabilistic model to generate valid pathnames
-# that have proven likely to expose bugs and divergent behaviour in
+# that have proven likely to expose bugs and divergent behavior in
 # different encoding implementations.
 
 from mercurial import store
--- a/tests/test-profile.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-profile.t	Tue Oct 20 15:59:10 2015 -0500
@@ -14,6 +14,9 @@
   $ hg --profile --config profiling.output=../out st
   $ grep CallCount ../out > /dev/null || cat ../out
 
+  $ hg --profile --config profiling.output=blackbox --config extensions.blackbox= st
+  $ grep CallCount .hg/blackbox.log > /dev/null || cat .hg/blackbox.log
+
   $ hg --profile --config profiling.format=text st 2>../out
   $ grep CallCount ../out > /dev/null || cat ../out
 
--- a/tests/test-pull.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-pull.t	Tue Oct 20 15:59:10 2015 -0500
@@ -52,6 +52,18 @@
   $ hg rollback --dry-run --verbose
   repository tip rolled back to revision -1 (undo pull: http://foo:***@localhost:$HGPORT/)
 
+Test pull of non-existing 20 character revision specification, making sure plain ascii identifiers
+are not encoded like a node:
+
+  $ hg pull -r 'xxxxxxxxxxxxxxxxxxxy'
+  pulling from http://foo@localhost:$HGPORT/
+  abort: unknown revision 'xxxxxxxxxxxxxxxxxxxy'!
+  [255]
+  $ hg pull -r 'xxxxxxxxxxxxxxxxxx y'
+  pulling from http://foo@localhost:$HGPORT/
+  abort: unknown revision '7878787878787878787878787878787878782079'!
+  [255]
+
 Issue622: hg init && hg pull -u URL doesn't checkout default branch
 
   $ cd ..
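
The added test-pull.t block pins down how unknown revision specs are reported: a 20-character string that looks like a plain ascii identifier stays readable, while one that cannot be an identifier (here because of the embedded space) is treated as raw binary node data and therefore shows up hex-encoded in the error. A small sketch of that distinction, with an illustrative identifier predicate that is not Mercurial's exact rule:

  # Why the second abort message shows hex: any 20-byte string may be raw
  # binary node data, and such data is displayed hex-encoded.  The
  # is_plain_identifier predicate is illustrative only.
  from binascii import hexlify

  def is_plain_identifier(s):
      return all(c.isalnum() or c in '-._' for c in s)

  for spec in ('xxxxxxxxxxxxxxxxxxxy', 'xxxxxxxxxxxxxxxxxx y'):
      if len(spec) == 20 and not is_plain_identifier(spec):
          shown = hexlify(spec.encode('ascii')).decode('ascii')
      else:
          shown = spec
      print("abort: unknown revision '%s'!" % shown)
  # abort: unknown revision 'xxxxxxxxxxxxxxxxxxxy'!
  # abort: unknown revision '7878787878787878787878787878787878782079'!
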
--- a/tests/test-push-http-bundle1.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-push-http-bundle1.t	Tue Oct 20 15:59:10 2015 -0500
@@ -114,6 +114,36 @@
   $ hg rollback
   repository tip rolled back to revision 0 (undo serve)
 
+expect success, pre-d1b16a746db6 server supports the unbundle capability, but
+has no parameter
+
+  $ cat <<EOF > notcapable-unbundleparam.py
+  > from mercurial import extensions, httppeer
+  > def capable(orig, self, name):
+  >     if name == 'unbundle':
+  >         return True
+  >     return orig(self, name)
+  > def uisetup(ui):
+  >     extensions.wrapfunction(httppeer.httppeer, 'capable', capable)
+  > EOF
+  $ cp $HGRCPATH $HGRCPATH.orig
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > notcapable-unbundleparam = `pwd`/notcapable-unbundleparam.py
+  > EOF
+  $ req
+  pushing to http://localhost:$HGPORT/
+  searching for changes
+  remote: adding changesets
+  remote: adding manifests
+  remote: adding file changes
+  remote: added 1 changesets with 1 changes to 1 files
+  remote: changegroup hook: * (glob)
+  % serve errors
+  $ hg rollback
+  repository tip rolled back to revision 0 (undo serve)
+  $ mv $HGRCPATH.orig $HGRCPATH
+
 expect push success, phase change failure
 
   $ cat > .hg/hgrc <<EOF
--- a/tests/test-rebase-abort.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-rebase-abort.t	Tue Oct 20 15:59:10 2015 -0500
@@ -64,8 +64,7 @@
   rebasing 3:3163e20567cc "L1"
   rebasing 4:46f0b057b5c0 "L2" (tip)
   merging common
-  warning: conflicts during merge.
-  merging common incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging common! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
 
@@ -94,8 +93,7 @@
   rebasing 3:3163e20567cc "L1"
   rebasing 4:46f0b057b5c0 "L2" (tip)
   merging common
-  warning: conflicts during merge.
-  merging common incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging common! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
 
@@ -165,8 +163,7 @@
   note: rebase of 3:a6484957d6b9 created no changes to commit
   rebasing 4:145842775fec "C1" (tip)
   merging c
-  warning: conflicts during merge.
-  merging c incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging c! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
 
@@ -225,8 +222,7 @@
   $ hg rebase -d master -r foo
   rebasing 3:6c0f977a22d8 "C" (tip foo)
   merging c
-  warning: conflicts during merge.
-  merging c incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging c! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
   $ hg rebase --abort
@@ -322,3 +318,37 @@
   commit: (clean)
   update: 1 new changesets, 2 branch heads (merge)
   phases: 4 draft
+
+test that aborting a rebase succeeds after rebasing with skipped commits onto a
+public changeset (issue4896)
+
+  $ hg init succeedonpublic
+  $ cd succeedonpublic
+  $ echo 'content' > root
+  $ hg commit -A -m 'root' -q
+
+set up public branch
+  $ echo 'content' > disappear
+  $ hg commit -A -m 'disappear public' -q
+commit will cause merge conflict on rebase
+  $ echo '' > root
+  $ hg commit -m 'remove content public' -q
+  $ hg phase --public
+
+set up the draft branch that will be rebased onto the public commit
+  $ hg up -r 0 -q
+  $ echo 'content' > disappear
+commit will disappear
+  $ hg commit -A -m 'disappear draft' -q
+  $ echo 'addedcontADDEDentadded' > root
+commit will cause merge conflict on rebase
+  $ hg commit -m 'add content draft' -q
+
+  $ hg rebase -d 'public()' --tool :merge -q
+  note: rebase of 3:0682fd3dabf5 created no changes to commit
+  warning: conflicts while merging root! (edit, then use 'hg resolve --mark')
+  unresolved conflicts (see hg resolve, then hg rebase --continue)
+  [1]
+  $ hg rebase --abort
+  rebase aborted
+
--- a/tests/test-rebase-bookmarks.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-rebase-bookmarks.t	Tue Oct 20 15:59:10 2015 -0500
@@ -170,8 +170,7 @@
   $ hg rebase
   rebasing 3:3d5fa227f4b5 "C" (Y Z)
   merging c
-  warning: conflicts during merge.
-  merging c incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging c! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
   $ echo 'c' > c
--- a/tests/test-rebase-check-restore.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-rebase-check-restore.t	Tue Oct 20 15:59:10 2015 -0500
@@ -68,8 +68,7 @@
   rebasing 1:27547f69f254 "B"
   rebasing 2:965c486023db "C"
   merging A
-  warning: conflicts during merge.
-  merging A incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
 
@@ -125,8 +124,7 @@
   $ hg rebase -s 5 -d 4 --keepbranches
   rebasing 5:01e6ebbd8272 "F" (tip)
   merging A
-  warning: conflicts during merge.
-  merging A incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
 
--- a/tests/test-rebase-collapse.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-rebase-collapse.t	Tue Oct 20 15:59:10 2015 -0500
@@ -60,11 +60,8 @@
   > EOF
   $ HGEDITOR="sh $TESTTMP/editor.sh" hg rebase --collapse --keepbranches -e
   rebasing 1:42ccdea3bb16 "B"
-  note: rebase of 1:42ccdea3bb16 created no changes to commit
   rebasing 2:5fddd98957c8 "C"
-  note: rebase of 2:5fddd98957c8 created no changes to commit
   rebasing 3:32af7686d403 "D"
-  note: rebase of 3:32af7686d403 created no changes to commit
   ==== before editing
   Collapsed revision
   * B
@@ -120,9 +117,7 @@
   $ hg phase --force --secret 6
   $ hg rebase --source 4 --collapse
   rebasing 4:9520eea781bc "E"
-  note: rebase of 4:9520eea781bc created no changes to commit
   rebasing 6:eea13746799a "G"
-  note: rebase of 6:eea13746799a created no changes to commit
   saved backup bundle to $TESTTMP/a2/.hg/strip-backup/9520eea781bc-fcd8edd4-backup.hg (glob)
 
   $ hg tglog
@@ -164,9 +159,7 @@
   > EOF
   $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg rebase --source 4 --collapse -m 'custom message' -e
   rebasing 4:9520eea781bc "E"
-  note: rebase of 4:9520eea781bc created no changes to commit
   rebasing 6:eea13746799a "G"
-  note: rebase of 6:eea13746799a created no changes to commit
   HGEDITFORM=rebase.collapse
   saved backup bundle to $TESTTMP/a3/.hg/strip-backup/9520eea781bc-fcd8edd4-backup.hg (glob)
 
@@ -276,11 +269,8 @@
 
   $ hg rebase -s 4 --collapse # root (4) is not a merge
   rebasing 4:8a5212ebc852 "E"
-  note: rebase of 4:8a5212ebc852 created no changes to commit
   rebasing 5:7f219660301f "F"
-  note: rebase of 5:7f219660301f created no changes to commit
   rebasing 6:c772a8b2dc17 "G"
-  note: rebase of 6:c772a8b2dc17 created no changes to commit
   saved backup bundle to $TESTTMP/b1/.hg/strip-backup/8a5212ebc852-75046b61-backup.hg (glob)
 
   $ hg tglog
@@ -430,14 +420,10 @@
 
   $ hg rebase -s 4 --collapse # root (4) is not a merge
   rebasing 4:8a5212ebc852 "E"
-  note: rebase of 4:8a5212ebc852 created no changes to commit
   rebasing 5:dca5924bb570 "F"
   merging E
-  note: rebase of 5:dca5924bb570 created no changes to commit
   rebasing 6:55a44ad28289 "G"
-  note: rebase of 6:55a44ad28289 created no changes to commit
   rebasing 7:417d3b648079 "H"
-  note: rebase of 7:417d3b648079 created no changes to commit
   saved backup bundle to $TESTTMP/c1/.hg/strip-backup/8a5212ebc852-f95d0879-backup.hg (glob)
 
   $ hg tglog
@@ -528,13 +514,9 @@
 
   $ hg rebase -s 1 --collapse
   rebasing 1:27547f69f254 "B"
-  note: rebase of 1:27547f69f254 created no changes to commit
   rebasing 2:f838bfaca5c7 "C"
-  note: rebase of 2:f838bfaca5c7 created no changes to commit
   rebasing 3:7bbcd6078bcc "D"
-  note: rebase of 3:7bbcd6078bcc created no changes to commit
   rebasing 4:0a42590ed746 "E"
-  note: rebase of 4:0a42590ed746 created no changes to commit
   saved backup bundle to $TESTTMP/d1/.hg/strip-backup/27547f69f254-9a3f7d92-backup.hg (glob)
 
   $ hg tglog
@@ -675,11 +657,9 @@
   merging a and d to d
   merging b and e to e
   merging c and f to f
-  note: rebase of 2:6e7340ee38c0 created no changes to commit
   rebasing 3:338e84e2e558 "move2" (tip)
   merging f and c to c
   merging e and g to g
-  note: rebase of 3:338e84e2e558 created no changes to commit
   saved backup bundle to $TESTTMP/copies/.hg/strip-backup/6e7340ee38c0-ef8ef003-backup.hg (glob)
   $ hg st
   $ hg st --copies --change tip
@@ -720,9 +700,7 @@
 
   $ hg rebase --collapse -b . -d 0
   rebasing 1:1352765a01d4 "change"
-  note: rebase of 1:1352765a01d4 created no changes to commit
   rebasing 2:64b456429f67 "Collapsed revision" (tip)
-  note: rebase of 2:64b456429f67 created no changes to commit
   saved backup bundle to $TESTTMP/copies/.hg/strip-backup/1352765a01d4-45a352ea-backup.hg (glob)
   $ hg st --change tip --copies
   M a
@@ -814,9 +792,7 @@
   $ hg book foo
   $ hg rebase -d 0 -r "1::2" --collapse -m collapsed
   rebasing 1:6d8d9f24eec3 "a"
-  note: rebase of 1:6d8d9f24eec3 created no changes to commit
   rebasing 2:1cc73eca5ecc "b" (tip foo)
-  note: rebase of 2:1cc73eca5ecc created no changes to commit
   saved backup bundle to $TESTTMP/collapseaddremove/.hg/strip-backup/6d8d9f24eec3-77d3b6e2-backup.hg (glob)
   $ hg log -G --template "{rev}: '{desc}' {bookmarks}"
   @  1: 'collapsed' foo
--- a/tests/test-rebase-conflicts.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-rebase-conflicts.t	Tue Oct 20 15:59:10 2015 -0500
@@ -64,8 +64,7 @@
   rebasing 3:3163e20567cc "L1"
   rebasing 4:46f0b057b5c0 "L2"
   merging common
-  warning: conflicts during merge.
-  merging common incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging common! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
 
--- a/tests/test-rebase-detach.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-rebase-detach.t	Tue Oct 20 15:59:10 2015 -0500
@@ -212,9 +212,7 @@
   
   $ hg rebase --collapse -s 2 -d 7
   rebasing 2:5fddd98957c8 "C"
-  note: rebase of 2:5fddd98957c8 created no changes to commit
   rebasing 3:32af7686d403 "D"
-  note: rebase of 3:32af7686d403 created no changes to commit
   saved backup bundle to $TESTTMP/a4/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob)
 
   $ hg  log -G --template "{rev}:{phase} '{desc}' {branches}\n"
@@ -341,11 +339,8 @@
 
   $ hg rebase -s 8 -d 7 --collapse --config ui.merge=internal:other
   rebasing 8:9790e768172d "I"
-  note: rebase of 8:9790e768172d created no changes to commit
   rebasing 9:5d7b11f5fb97 "Merge"
-  note: rebase of 9:5d7b11f5fb97 created no changes to commit
   rebasing 10:9427d4d5af81 "J" (tip)
-  note: rebase of 10:9427d4d5af81 created no changes to commit
   saved backup bundle to $TESTTMP/a6/.hg/strip-backup/9790e768172d-c2111e9d-backup.hg (glob)
 
   $ hg tglog
@@ -391,8 +386,7 @@
   $ hg rebase -s 8 -d 7 --config ui.merge=internal:fail
   rebasing 8:6215fafa5447 "H2" (tip)
   merging H
-  warning: conflicts during merge.
-  merging H incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging H! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
   $ hg resolve --all -t internal:local
--- a/tests/test-rebase-interruptions.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-rebase-interruptions.t	Tue Oct 20 15:59:10 2015 -0500
@@ -60,8 +60,7 @@
   rebasing 1:27547f69f254 "B"
   rebasing 2:965c486023db "C"
   merging A
-  warning: conflicts during merge.
-  merging A incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
 
@@ -98,8 +97,7 @@
   already rebased 1:27547f69f254 "B" as 45396c49d53b
   rebasing 2:965c486023db "C"
   merging A
-  warning: conflicts during merge.
-  merging A incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
 
@@ -157,8 +155,7 @@
   rebasing 1:27547f69f254 "B"
   rebasing 2:965c486023db "C"
   merging A
-  warning: conflicts during merge.
-  merging A incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
 
@@ -230,8 +227,7 @@
   rebasing 1:27547f69f254 "B"
   rebasing 2:965c486023db "C"
   merging A
-  warning: conflicts during merge.
-  merging A incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
 
--- a/tests/test-rebase-mq-skip.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-rebase-mq-skip.t	Tue Oct 20 15:59:10 2015 -0500
@@ -142,12 +142,12 @@
   $ hg up -q qtip
 
   $ HGMERGE=internal:fail hg rebase
-  rebasing 1:b4bffa6e4776 "r1" (1.diff qbase)
+  rebasing 1:b4bffa6e4776 "r1" (qbase r1)
   note: rebase of 1:b4bffa6e4776 created no changes to commit
-  rebasing 2:c0fd129beb01 "r2" (2.diff)
-  rebasing 3:6ff5b8feed8e "r3" (3.diff)
+  rebasing 2:c0fd129beb01 "r2" (r2)
+  rebasing 3:6ff5b8feed8e "r3" (r3)
   note: rebase of 3:6ff5b8feed8e created no changes to commit
-  rebasing 4:094320fec554 "r4" (4.diff)
+  rebasing 4:094320fec554 "r4" (r4)
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
 
@@ -155,20 +155,20 @@
   (no more unresolved files)
 
   $ hg rebase --continue
-  already rebased 1:b4bffa6e4776 "r1" (1.diff qbase) as 057f55ff8f44
-  already rebased 2:c0fd129beb01 "r2" (2.diff) as 1660ab13ce9a
-  already rebased 3:6ff5b8feed8e "r3" (3.diff) as 1660ab13ce9a
-  rebasing 4:094320fec554 "r4" (4.diff)
+  already rebased 1:b4bffa6e4776 "r1" (qbase r1) as 057f55ff8f44
+  already rebased 2:c0fd129beb01 "r2" (r2) as 1660ab13ce9a
+  already rebased 3:6ff5b8feed8e "r3" (r3) as 1660ab13ce9a
+  rebasing 4:094320fec554 "r4" (r4)
   note: rebase of 4:094320fec554 created no changes to commit
-  rebasing 5:681a378595ba "r5" (5.diff)
-  rebasing 6:512a1f24768b "r6" (6.diff qtip)
+  rebasing 5:681a378595ba "r5" (r5)
+  rebasing 6:512a1f24768b "r6" (qtip r6)
   note: rebase of 6:512a1f24768b created no changes to commit
   saved backup bundle to $TESTTMP/b/.hg/strip-backup/b4bffa6e4776-b9bfb84d-backup.hg (glob)
 
   $ hg tglog
-  @  8: 'r5' tags: 5.diff qtip tip
+  @  8: 'r5' tags: qtip r5 tip
   |
-  o  7: 'r2' tags: 2.diff qbase
+  o  7: 'r2' tags: qbase r2
   |
   o  6: 'branch2-r6' tags: qparent
   |
--- a/tests/test-rebase-mq.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-rebase-mq.t	Tue Oct 20 15:59:10 2015 -0500
@@ -61,8 +61,7 @@
   $ hg rebase -s 2 -d 1
   rebasing 2:3504f44bffc0 "P0" (f.patch qbase)
   merging f
-  warning: conflicts during merge.
-  merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
 
@@ -75,8 +74,7 @@
   rebasing 2:3504f44bffc0 "P0" (f.patch qbase)
   rebasing 3:929394423cd3 "P1" (f2.patch qtip tip)
   merging f
-  warning: conflicts during merge.
-  merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
 
--- a/tests/test-rebase-named-branches.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-rebase-named-branches.t	Tue Oct 20 15:59:10 2015 -0500
@@ -97,7 +97,6 @@
   
   $ hg rebase -s dev-one -d 0 --keepbranches
   rebasing 5:643fc9128048 "dev-one named branch"
-  note: rebase of 5:643fc9128048 created no changes to commit
   rebasing 6:24de4aff8e28 "F"
   rebasing 7:4b988a958030 "G"
   rebasing 8:31d0e4ba75e6 "H"
@@ -105,13 +104,15 @@
   saved backup bundle to $TESTTMP/a1/.hg/strip-backup/643fc9128048-c4ee9ef5-backup.hg (glob)
 
   $ hg tglog
-  @  8: 'dev-two named branch' dev-two
+  @  9: 'dev-two named branch' dev-two
   |
-  o  7: 'H'
+  o  8: 'H'
   |
-  | o  6: 'G'
+  | o  7: 'G'
   |/|
-  o |  5: 'F'
+  o |  6: 'F'
+  | |
+  o |  5: 'dev-one named branch' dev-one
   | |
   | o  4: 'E'
   |/
@@ -125,20 +126,23 @@
   
   $ hg update 3
   3 files updated, 0 files merged, 3 files removed, 0 files unresolved
-  $ hg branch dev-one
+  $ hg branch -f dev-one
   marked working directory as branch dev-one
   $ hg ci -m 'dev-one named branch'
+  created new head
 
   $ hg tglog
-  @  9: 'dev-one named branch' dev-one
+  @  10: 'dev-one named branch' dev-one
   |
-  | o  8: 'dev-two named branch' dev-two
+  | o  9: 'dev-two named branch' dev-two
+  | |
+  | o  8: 'H'
   | |
-  | o  7: 'H'
-  | |
-  | | o  6: 'G'
+  | | o  7: 'G'
   | |/|
-  | o |  5: 'F'
+  | o |  6: 'F'
+  | | |
+  | o |  5: 'dev-one named branch' dev-one
   | | |
   | | o  4: 'E'
   | |/
@@ -151,11 +155,13 @@
   o  0: 'A'
   
   $ hg rebase -b 'max(branch("dev-two"))' -d dev-one --keepbranches
-  rebasing 5:77854864208c "F"
-  rebasing 6:63b4f9c788a1 "G"
-  rebasing 7:87861e68abd3 "H"
-  rebasing 8:ec00d4e0efca "dev-two named branch"
-  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/77854864208c-74d59436-backup.hg (glob)
+  rebasing 5:bc8139ee757c "dev-one named branch"
+  note: rebase of 5:bc8139ee757c created no changes to commit
+  rebasing 6:42aa3cf0fa7a "F"
+  rebasing 7:1a1e6f72ec38 "G"
+  rebasing 8:904590360559 "H"
+  rebasing 9:59c2e59309fe "dev-two named branch"
+  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/bc8139ee757c-f11c1080-backup.hg (glob)
 
   $ hg tglog
   o  9: 'dev-two named branch' dev-two
@@ -180,21 +186,22 @@
   
   $ hg rebase -s 'max(branch("dev-one"))' -d 0 --keepbranches
   rebasing 5:643fc9128048 "dev-one named branch"
-  note: rebase of 5:643fc9128048 created no changes to commit
-  rebasing 6:05584c618d45 "F"
-  rebasing 7:471695f5257d "G"
-  rebasing 8:8382a539a2df "H"
-  rebasing 9:11f718458b32 "dev-two named branch" (tip)
-  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/643fc9128048-177f3c5c-backup.hg (glob)
+  rebasing 6:679f28760620 "F"
+  rebasing 7:549f007a9f5f "G"
+  rebasing 8:12b2bc666e20 "H"
+  rebasing 9:71325f8bc082 "dev-two named branch" (tip)
+  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/643fc9128048-6cdd1a52-backup.hg (glob)
 
   $ hg tglog
-  o  8: 'dev-two named branch' dev-two
+  o  9: 'dev-two named branch' dev-two
   |
-  o  7: 'H'
+  o  8: 'H'
   |
-  | o  6: 'G'
+  | o  7: 'G'
   |/|
-  o |  5: 'F'
+  o |  6: 'F'
+  | |
+  @ |  5: 'dev-one named branch' dev-one
   | |
   | o  4: 'E'
   |/
@@ -204,61 +211,66 @@
   | |
   | o  1: 'B'
   |/
-  @  0: 'A'
+  o  0: 'A'
   
+  $ hg up -r 0 > /dev/null
 
 Rebasing descendant onto ancestor across different named branches
 
-  $ hg rebase -s 1 -d 8 --keepbranches
+  $ hg rebase -s 1 -d 9 --keepbranches
   rebasing 1:42ccdea3bb16 "B"
   rebasing 2:5fddd98957c8 "C"
   rebasing 3:32af7686d403 "D"
   saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
 
   $ hg tglog
-  o  8: 'D'
+  o  9: 'D'
+  |
+  o  8: 'C'
   |
-  o  7: 'C'
+  o  7: 'B'
   |
-  o  6: 'B'
+  o  6: 'dev-two named branch' dev-two
   |
-  o  5: 'dev-two named branch' dev-two
-  |
-  o  4: 'H'
+  o  5: 'H'
   |
-  | o  3: 'G'
+  | o  4: 'G'
   |/|
-  o |  2: 'F'
+  o |  3: 'F'
+  | |
+  o |  2: 'dev-one named branch' dev-one
   | |
   | o  1: 'E'
   |/
   @  0: 'A'
   
-  $ hg rebase -s 4 -d 5
+  $ hg rebase -s 5 -d 6
   abort: source is ancestor of destination
   [255]
 
-  $ hg rebase -s 5 -d 4
-  rebasing 5:32d3b0de7f37 "dev-two named branch"
-  rebasing 6:580fcd9fd48f "B"
-  rebasing 7:32aba0402ed2 "C"
-  rebasing 8:e4787b575338 "D" (tip)
-  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/32d3b0de7f37-c37815ca-backup.hg (glob)
+  $ hg rebase -s 6 -d 5
+  rebasing 6:3944801ae4ea "dev-two named branch"
+  rebasing 7:3bdb949809d9 "B"
+  rebasing 8:a0d543090fa4 "C"
+  rebasing 9:e9f862ce8bad "D" (tip)
+  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/3944801ae4ea-fb46ed74-backup.hg (glob)
 
   $ hg tglog
-  o  8: 'D'
+  o  9: 'D'
+  |
+  o  8: 'C'
   |
-  o  7: 'C'
+  o  7: 'B'
   |
-  o  6: 'B'
+  o  6: 'dev-two named branch'
   |
-  o  5: 'dev-two named branch'
-  |
-  o  4: 'H'
+  o  5: 'H'
   |
-  | o  3: 'G'
+  | o  4: 'G'
   |/|
-  o |  2: 'F'
+  o |  3: 'F'
+  | |
+  o |  2: 'dev-one named branch' dev-one
   | |
   | o  1: 'E'
   |/
@@ -272,13 +284,13 @@
   $ hg ci -m 'create b'
   $ hg ci -m 'close b' --close
   $ hg rebase -b 8 -d b
-  reopening closed branch head ea9de14a36c6
-  rebasing 4:86693275b2ef "H"
-  rebasing 5:2149726d0970 "dev-two named branch"
-  rebasing 6:81e55225e95d "B"
-  rebasing 7:09eda3dc3195 "C"
-  rebasing 8:31298fc9d159 "D"
-  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/86693275b2ef-f9fcf4e2-backup.hg (glob)
+  reopening closed branch head 2b586e70108d
+  rebasing 5:8e279d293175 "H"
+  rebasing 6:c57724c84928 "dev-two named branch"
+  rebasing 7:160b0930ccc6 "B"
+  rebasing 8:810110211f50 "C"
+  rebasing 9:e522577ccdbd "D"
+  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/8e279d293175-b023e27c-backup.hg (glob)
 
   $ cd ..
 
--- a/tests/test-rebase-obsolete.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-rebase-obsolete.t	Tue Oct 20 15:59:10 2015 -0500
@@ -203,10 +203,9 @@
   |/
   o  0:cd010b8cd998 A
   
-  $ hg rebase --source 'desc(B)' --dest 'tip'
+  $ hg rebase --source 'desc(B)' --dest 'tip' --config experimental.rebaseskipobsolete=True
   rebasing 8:8877864f1edb "B"
-  rebasing 9:08483444fef9 "D"
-  note: rebase of 9:08483444fef9 created no changes to commit
+  note: not rebasing 9:08483444fef9 "D", already in destination as 11:4596109a6a43 "D"
   rebasing 10:5ae4c968c6ac "C"
   $ hg debugobsolete
   42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (*) {'user': 'test'} (glob)
@@ -214,7 +213,6 @@
   32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (*) {'user': 'test'} (glob)
   08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (*) {'user': 'test'} (glob)
   8877864f1edb05d0e07dc4ba77b67a80a7b86672 462a34d07e599b87ea08676a449373fe4e2e1347 0 (*) {'user': 'test'} (glob)
-  08483444fef91d6224f6655ee586a65d263ad34c 0 {8877864f1edb05d0e07dc4ba77b67a80a7b86672} (*) {'user': 'test'} (glob)
   5ae4c968c6aca831df823664e706c9d4aa34473d 98f6af4ee9539e14da4465128f894c274900b6e5 0 (*) {'user': 'test'} (glob)
   $ hg log --rev 'divergent()'
   $ hg log -G
@@ -261,11 +259,8 @@
   $ cd collapse
   $ hg rebase  -s 42ccdea3bb16 -d eea13746799a --collapse
   rebasing 1:42ccdea3bb16 "B"
-  note: rebase of 1:42ccdea3bb16 created no changes to commit
   rebasing 2:5fddd98957c8 "C"
-  note: rebase of 2:5fddd98957c8 created no changes to commit
   rebasing 3:32af7686d403 "D"
-  note: rebase of 3:32af7686d403 created no changes to commit
   $ hg log -G
   o  8:4dc2197e807b Collapsed revision
   |
@@ -540,3 +535,103 @@
   |/
   o  0:cd010b8cd998 A
   
+  $ hg up 14 -C
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ echo "K" > K
+  $ hg add K
+  $ hg commit --amend -m "K"
+  $ echo "L" > L
+  $ hg add L
+  $ hg commit -m "L"
+  $ hg up '.^'
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ echo "M" > M
+  $ hg add M
+  $ hg commit --amend -m "M"
+  $ hg log -G
+  @  20:bfaedf8eb73b M
+  |
+  | o  18:97219452e4bd L
+  | |
+  | x  17:fc37a630c901 K
+  |/
+  | o  15:5ae8a643467b J
+  | |
+  | x  14:9ad579b4a5de I
+  |/
+  | o  12:acd174b7ab39 I
+  | |
+  | o  11:6c11a6218c97 H
+  | |
+  o |  10:b5313c85b22e D
+  |/
+  | o    8:53a6a128b2b7 M
+  | |\
+  | | x  7:02de42196ebe H
+  | | |
+  o---+  6:eea13746799a G
+  | | |
+  | | o  5:24b6387c8c8c F
+  | | |
+  o---+  4:9520eea781bc E
+   / /
+  x |  3:32af7686d403 D
+  | |
+  o |  2:5fddd98957c8 C
+  | |
+  o |  1:42ccdea3bb16 B
+  |/
+  o  0:cd010b8cd998 A
+  
+  $ hg rebase -s 14 -d 18 --config experimental.rebaseskipobsolete=True
+  note: not rebasing 14:9ad579b4a5de "I", already in destination as 17:fc37a630c901 "K"
+  rebasing 15:5ae8a643467b "J"
+
+  $ cd ..
+
+Skip obsolete changeset even with multiple hops
+-----------------------------------------------
+
+setup
+
+  $ hg init obsskip
+  $ cd obsskip
+  $ cat << EOF >> .hg/hgrc
+  > [experimental]
+  > rebaseskipobsolete = True
+  > [extensions]
+  > strip =
+  > EOF
+  $ echo A > A
+  $ hg add A
+  $ hg commit -m A
+  $ echo B > B
+  $ hg add B
+  $ hg commit -m B0
+  $ hg commit --amend -m B1
+  $ hg commit --amend -m B2
+  $ hg up --hidden 'desc(B0)'
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ echo C > C
+  $ hg add C
+  $ hg commit -m C
+
+Rebase finds its way through a chain of markers
+
+  $ hg rebase -d 'desc(B2)'
+  note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 3:261e70097290 "B2"
+  rebasing 4:212cb178bcbb "C" (tip)
+
+Even when the chain includes a missing node
+
+  $ hg up --hidden 'desc(B0)'
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ echo D > D
+  $ hg add D
+  $ hg commit -m D
+  $ hg --hidden strip -r 'desc(B1)'
+  saved backup bundle to $TESTTMP/obsskip/.hg/strip-backup/86f6414ccda7-b1c452ee-backup.hg (glob)
+
+  $ hg rebase -d 'desc(B2)'
+  note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 2:261e70097290 "B2"
+  rebasing 5:1a79b7535141 "D" (tip)
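
The test-rebase-obsolete.t additions exercise experimental.rebaseskipobsolete: when a source changeset is obsolete and one of its successors is already an ancestor of the destination, rebase prints "note: not rebasing ..., already in destination ..." instead of creating an empty commit, and it follows successor chains across several markers even when an intermediate node has been stripped. A toy sketch of that successor-chain walk follows; the plain dict keyed by precursor is a simplification of Mercurial's obsstore, and the node names are made up.

  # Toy sketch of the "already in destination" check: follow obsolescence
  # markers (precursor -> successors) until reaching a changeset that is an
  # ancestor of the destination, or running out of known successors.  The
  # dict stands in for Mercurial's obsstore; node names are made up.
  def successor_in_destination(node, markers, destancestors):
      seen = set()
      queue = [node]
      while queue:
          n = queue.pop()
          if n in seen:
              continue
          seen.add(n)
          for succ in markers.get(n, ()):
              if succ in destancestors:
                  return succ
              queue.append(succ)        # multiple hops: keep following
      return None

  markers = {'B0': ['B1'], 'B1': ['B2']}   # B0 was amended twice
  destancestors = {'A', 'B2'}
  print(successor_in_destination('B0', markers, destancestors))  # B2
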
--- a/tests/test-rebase-parameters.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-rebase-parameters.t	Tue Oct 20 15:59:10 2015 -0500
@@ -485,17 +485,16 @@
   $ hg resolve -m c2
   (no more unresolved files)
   $ hg rebase -c --tool internal:fail
-  tool option will be ignored
   rebasing 2:e4e3f3546619 "c2b" (tip)
   note: rebase of 2:e4e3f3546619 created no changes to commit
   saved backup bundle to $TESTTMP/b3/.hg/strip-backup/e4e3f3546619-b0841178-backup.hg (glob)
 
   $ hg rebase -i
-  abort: interactive history editing is supported by the 'histedit' extension (see "hg help histedit")
+  abort: interactive history editing is supported by the 'histedit' extension (see "hg --config extensions.histedit= help -e histedit")
   [255]
 
   $ hg rebase --interactive
-  abort: interactive history editing is supported by the 'histedit' extension (see "hg help histedit")
+  abort: interactive history editing is supported by the 'histedit' extension (see "hg --config extensions.histedit= help -e histedit")
   [255]
 
   $ cd ..
--- a/tests/test-rebase-pull.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-rebase-pull.t	Tue Oct 20 15:59:10 2015 -0500
@@ -185,7 +185,7 @@
   o  0: 'C1'
   
   $ cd ../c
-  $ hg pull --rebase --config experimental.bundle2-exp=True --config experimental.strip-bundle2-version=02
+  $ hg pull --rebase
   pulling from $TESTTMP/a (glob)
   searching for changes
   adding changesets
--- a/tests/test-rebase-rename.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-rebase-rename.t	Tue Oct 20 15:59:10 2015 -0500
@@ -310,13 +310,10 @@
   created new head
   $ hg rebase -s 2 --dest 5 --collapse
   rebasing 2:68bf06433839 "File b created as copy of a and modified"
-  note: rebase of 2:68bf06433839 created no changes to commit
   rebasing 3:af74b229bc02 "File c created as copy of b and modified"
   merging b and c to c
-  note: rebase of 3:af74b229bc02 created no changes to commit
   rebasing 4:dbb9ba033561 "File d created as copy of c and modified"
   merging c and d to d
-  note: rebase of 4:dbb9ba033561 created no changes to commit
   saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/strip-backup/68bf06433839-dde37595-backup.hg (glob)
   $ hg co tip
   3 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-rebase-scenario-global.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-rebase-scenario-global.t	Tue Oct 20 15:59:10 2015 -0500
@@ -743,3 +743,14 @@
   rebasing 2:779a07b1b7a0 "first source commit"
   rebasing 3:a7d6f3a00bf3 "second source with subdir" (tip)
   saved backup bundle to $TESTTMP/cwd-vanish/.hg/strip-backup/779a07b1b7a0-853e0073-backup.hg (glob)
+
+Test experimental revset
+
+  $ cd ..
+  $ hg log -r '_destrebase()'
+  changeset:   3:1910d5ff34ea
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     second source with subdir
+  
--- a/tests/test-rename-dir-merge.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-rename-dir-merge.t	Tue Oct 20 15:59:10 2015 -0500
@@ -131,8 +131,7 @@
   $ hg commit -qm 'new file in target directory'
   $ hg merge 2
   merging b/c and a/c to b/c
-  warning: conflicts during merge.
-  merging b/c incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging b/c! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
@@ -162,8 +161,7 @@
   C a/c
   $ hg merge 5
   merging a/c and b/c to b/c
-  warning: conflicts during merge.
-  merging b/c incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging b/c! (edit, then use 'hg resolve --mark')
   2 files updated, 0 files merged, 2 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
--- a/tests/test-rename-merge1.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-rename-merge1.t	Tue Oct 20 15:59:10 2015 -0500
@@ -40,8 +40,8 @@
   removing a
    b2: remote created -> g
   getting b2
-   b: remote moved from a -> m
-  picked tool 'internal:merge' for b (binary False symlink False)
+   b: remote moved from a -> m (premerge)
+  picked tool ':merge' for b (binary False symlink False)
   merging a and b to b
   my b@044f8520aeeb+ other b@85c198ef2f6c ancestor a@af1939970a1c
    premerge successful
--- a/tests/test-rename-merge2.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-rename-merge2.t	Tue Oct 20 15:59:10 2015 -0500
@@ -89,15 +89,18 @@
    preserving a for resolve of b
    preserving rev for resolve of rev
    a: remote unchanged -> k
-   b: remote copied from a -> m
+   b: remote copied from a -> m (premerge)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging a and b to b
   my b@e300d1c794ec+ other b@4ce40f5aca24 ancestor a@924404dff337
    premerge successful
-   rev: versions differ -> m
+   rev: versions differ -> m (premerge)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@e300d1c794ec+ other rev@4ce40f5aca24 ancestor rev@924404dff337
+   rev: versions differ -> m (merge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  my rev@e300d1c794ec+ other rev@4ce40f5aca24 ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -126,16 +129,19 @@
    preserving rev for resolve of rev
    a: remote is newer -> g
   getting a
-   b: local copied/moved from a -> m
+   b: local copied/moved from a -> m (premerge)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b and a to b
   my b@86a2aa42fc76+ other a@f4db7e329e71 ancestor a@924404dff337
    premerge successful
-   rev: versions differ -> m
+   rev: versions differ -> m (premerge)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@86a2aa42fc76+ other rev@f4db7e329e71 ancestor rev@924404dff337
-  launching merge tool: python ../merge *$TESTTMP/t/t/rev* * (glob)
+   rev: versions differ -> m (merge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  my rev@86a2aa42fc76+ other rev@f4db7e329e71 ancestor rev@924404dff337
+  launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   1 files updated, 2 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
@@ -162,15 +168,18 @@
    preserving a for resolve of b
    preserving rev for resolve of rev
   removing a
-   b: remote moved from a -> m
+   b: remote moved from a -> m (premerge)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging a and b to b
   my b@e300d1c794ec+ other b@bdb19105162a ancestor a@924404dff337
    premerge successful
-   rev: versions differ -> m
+   rev: versions differ -> m (premerge)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@e300d1c794ec+ other rev@bdb19105162a ancestor rev@924404dff337
+   rev: versions differ -> m (merge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  my rev@e300d1c794ec+ other rev@bdb19105162a ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -196,15 +205,18 @@
    ancestor: 924404dff337, local: 02963e448370+, remote: f4db7e329e71
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: local copied/moved from a -> m
+   b: local copied/moved from a -> m (premerge)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b and a to b
   my b@02963e448370+ other a@f4db7e329e71 ancestor a@924404dff337
    premerge successful
-   rev: versions differ -> m
+   rev: versions differ -> m (premerge)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@02963e448370+ other rev@f4db7e329e71 ancestor rev@924404dff337
+   rev: versions differ -> m (merge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  my rev@02963e448370+ other rev@f4db7e329e71 ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
@@ -231,10 +243,13 @@
    preserving rev for resolve of rev
    b: remote created -> g
   getting b
-   rev: versions differ -> m
+   rev: versions differ -> m (premerge)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@94b33a1b7f2d+ other rev@4ce40f5aca24 ancestor rev@924404dff337
+   rev: versions differ -> m (merge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  my rev@94b33a1b7f2d+ other rev@4ce40f5aca24 ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   1 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -259,10 +274,13 @@
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 86a2aa42fc76+, remote: 97c705ade336
    preserving rev for resolve of rev
-   rev: versions differ -> m
+   rev: versions differ -> m (premerge)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@86a2aa42fc76+ other rev@97c705ade336 ancestor rev@924404dff337
+   rev: versions differ -> m (merge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  my rev@86a2aa42fc76+ other rev@97c705ade336 ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -291,10 +309,13 @@
   removing a
    b: remote created -> g
   getting b
-   rev: versions differ -> m
+   rev: versions differ -> m (premerge)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@94b33a1b7f2d+ other rev@bdb19105162a ancestor rev@924404dff337
+   rev: versions differ -> m (merge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  my rev@94b33a1b7f2d+ other rev@bdb19105162a ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   1 files updated, 1 files merged, 1 files removed, 0 files unresolved
@@ -318,10 +339,13 @@
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 02963e448370+, remote: 97c705ade336
    preserving rev for resolve of rev
-   rev: versions differ -> m
+   rev: versions differ -> m (premerge)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@02963e448370+ other rev@97c705ade336 ancestor rev@924404dff337
+   rev: versions differ -> m (merge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  my rev@02963e448370+ other rev@97c705ade336 ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   0 files updated, 1 files merged, 0 files removed, 0 files unresolved
@@ -343,15 +367,21 @@
    ancestor: 924404dff337, local: 62e7bf090eba+, remote: 49b6d8032493
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: both renamed from a -> m
+   b: both renamed from a -> m (premerge)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
   my b@62e7bf090eba+ other b@49b6d8032493 ancestor a@924404dff337
+   rev: versions differ -> m (premerge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  merging rev
+  my rev@62e7bf090eba+ other rev@49b6d8032493 ancestor rev@924404dff337
+   b: both renamed from a -> m (merge)
+  picked tool 'python ../merge' for b (binary False symlink False)
+  my b@62e7bf090eba+ other b@49b6d8032493 ancestor a@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
   merge tool returned: 0
-   rev: versions differ -> m
+   rev: versions differ -> m (merge)
   picked tool 'python ../merge' for rev (binary False symlink False)
-  merging rev
   my rev@62e7bf090eba+ other rev@49b6d8032493 ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
@@ -384,10 +414,13 @@
    preserving rev for resolve of rev
    c: remote created -> g
   getting c
-   rev: versions differ -> m
+   rev: versions differ -> m (premerge)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@02963e448370+ other rev@fe905ef2c33e ancestor rev@924404dff337
+   rev: versions differ -> m (merge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  my rev@02963e448370+ other rev@fe905ef2c33e ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   note: possible conflict - a was renamed multiple times to:
@@ -413,15 +446,21 @@
    ancestor: 924404dff337, local: 86a2aa42fc76+, remote: af30c7647fc7
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: both created -> m
+   b: both created -> m (premerge)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
   my b@86a2aa42fc76+ other b@af30c7647fc7 ancestor b@000000000000
+   rev: versions differ -> m (premerge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  merging rev
+  my rev@86a2aa42fc76+ other rev@af30c7647fc7 ancestor rev@924404dff337
+   b: both created -> m (merge)
+  picked tool 'python ../merge' for b (binary False symlink False)
+  my b@86a2aa42fc76+ other b@af30c7647fc7 ancestor b@000000000000
   launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
   merge tool returned: 0
-   rev: versions differ -> m
+   rev: versions differ -> m (merge)
   picked tool 'python ../merge' for rev (binary False symlink False)
-  merging rev
   my rev@86a2aa42fc76+ other rev@af30c7647fc7 ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
@@ -447,15 +486,21 @@
    preserving rev for resolve of rev
    a: other deleted -> r
   removing a
-   b: both created -> m
+   b: both created -> m (premerge)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
   my b@59318016310c+ other b@bdb19105162a ancestor b@000000000000
+   rev: versions differ -> m (premerge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  merging rev
+  my rev@59318016310c+ other rev@bdb19105162a ancestor rev@924404dff337
+   b: both created -> m (merge)
+  picked tool 'python ../merge' for b (binary False symlink False)
+  my b@59318016310c+ other b@bdb19105162a ancestor b@000000000000
   launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
   merge tool returned: 0
-   rev: versions differ -> m
+   rev: versions differ -> m (merge)
   picked tool 'python ../merge' for rev (binary False symlink False)
-  merging rev
   my rev@59318016310c+ other rev@bdb19105162a ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
@@ -480,15 +525,21 @@
    preserving rev for resolve of rev
    a: remote is newer -> g
   getting a
-   b: both created -> m
+   b: both created -> m (premerge)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
   my b@86a2aa42fc76+ other b@8dbce441892a ancestor b@000000000000
+   rev: versions differ -> m (premerge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  merging rev
+  my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337
+   b: both created -> m (merge)
+  picked tool 'python ../merge' for b (binary False symlink False)
+  my b@86a2aa42fc76+ other b@8dbce441892a ancestor b@000000000000
   launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
   merge tool returned: 0
-   rev: versions differ -> m
+   rev: versions differ -> m (merge)
   picked tool 'python ../merge' for rev (binary False symlink False)
-  merging rev
   my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
@@ -514,15 +565,21 @@
    preserving rev for resolve of rev
    a: other deleted -> r
   removing a
-   b: both created -> m
+   b: both created -> m (premerge)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
   my b@59318016310c+ other b@bdb19105162a ancestor b@000000000000
+   rev: versions differ -> m (premerge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  merging rev
+  my rev@59318016310c+ other rev@bdb19105162a ancestor rev@924404dff337
+   b: both created -> m (merge)
+  picked tool 'python ../merge' for b (binary False symlink False)
+  my b@59318016310c+ other b@bdb19105162a ancestor b@000000000000
   launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
   merge tool returned: 0
-   rev: versions differ -> m
+   rev: versions differ -> m (merge)
   picked tool 'python ../merge' for rev (binary False symlink False)
-  merging rev
   my rev@59318016310c+ other rev@bdb19105162a ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
@@ -547,15 +604,21 @@
    preserving rev for resolve of rev
    a: remote is newer -> g
   getting a
-   b: both created -> m
+   b: both created -> m (premerge)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
   my b@86a2aa42fc76+ other b@8dbce441892a ancestor b@000000000000
+   rev: versions differ -> m (premerge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  merging rev
+  my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337
+   b: both created -> m (merge)
+  picked tool 'python ../merge' for b (binary False symlink False)
+  my b@86a2aa42fc76+ other b@8dbce441892a ancestor b@000000000000
   launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
   merge tool returned: 0
-   rev: versions differ -> m
+   rev: versions differ -> m (merge)
   picked tool 'python ../merge' for rev (binary False symlink False)
-  merging rev
   my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
@@ -580,15 +643,21 @@
    preserving b for resolve of b
    preserving rev for resolve of rev
    a: remote unchanged -> k
-   b: both created -> m
+   b: both created -> m (premerge)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
   my b@0b76e65c8289+ other b@4ce40f5aca24 ancestor b@000000000000
+   rev: versions differ -> m (premerge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  merging rev
+  my rev@0b76e65c8289+ other rev@4ce40f5aca24 ancestor rev@924404dff337
+   b: both created -> m (merge)
+  picked tool 'python ../merge' for b (binary False symlink False)
+  my b@0b76e65c8289+ other b@4ce40f5aca24 ancestor b@000000000000
   launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
   merge tool returned: 0
-   rev: versions differ -> m
+   rev: versions differ -> m (merge)
   picked tool 'python ../merge' for rev (binary False symlink False)
-  merging rev
   my rev@0b76e65c8289+ other rev@4ce40f5aca24 ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
@@ -616,15 +685,21 @@
    preserving rev for resolve of rev
    a: prompt recreating -> g
   getting a
-   b: both created -> m
+   b: both created -> m (premerge)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
   my b@02963e448370+ other b@8dbce441892a ancestor b@000000000000
+   rev: versions differ -> m (premerge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  merging rev
+  my rev@02963e448370+ other rev@8dbce441892a ancestor rev@924404dff337
+   b: both created -> m (merge)
+  picked tool 'python ../merge' for b (binary False symlink False)
+  my b@02963e448370+ other b@8dbce441892a ancestor b@000000000000
   launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
   merge tool returned: 0
-   rev: versions differ -> m
+   rev: versions differ -> m (merge)
   picked tool 'python ../merge' for rev (binary False symlink False)
-  merging rev
   my rev@02963e448370+ other rev@8dbce441892a ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
@@ -651,15 +726,21 @@
    preserving b for resolve of b
    preserving rev for resolve of rev
    a: prompt keep -> a
-   b: both created -> m
+   b: both created -> m (premerge)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
   my b@0b76e65c8289+ other b@bdb19105162a ancestor b@000000000000
+   rev: versions differ -> m (premerge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  merging rev
+  my rev@0b76e65c8289+ other rev@bdb19105162a ancestor rev@924404dff337
+   b: both created -> m (merge)
+  picked tool 'python ../merge' for b (binary False symlink False)
+  my b@0b76e65c8289+ other b@bdb19105162a ancestor b@000000000000
   launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
   merge tool returned: 0
-   rev: versions differ -> m
+   rev: versions differ -> m (merge)
   picked tool 'python ../merge' for rev (binary False symlink False)
-  merging rev
   my rev@0b76e65c8289+ other rev@bdb19105162a ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
@@ -687,15 +768,21 @@
    preserving a for resolve of b
    preserving rev for resolve of rev
   removing a
-   b: remote moved from a -> m
+   b: remote moved from a -> m (premerge)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging a and b to b
   my b@e300d1c794ec+ other b@49b6d8032493 ancestor a@924404dff337
+   rev: versions differ -> m (premerge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  merging rev
+  my rev@e300d1c794ec+ other rev@49b6d8032493 ancestor rev@924404dff337
+   b: remote moved from a -> m (merge)
+  picked tool 'python ../merge' for b (binary False symlink False)
+  my b@e300d1c794ec+ other b@49b6d8032493 ancestor a@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
   merge tool returned: 0
-   rev: versions differ -> m
+   rev: versions differ -> m (merge)
   picked tool 'python ../merge' for rev (binary False symlink False)
-  merging rev
   my rev@e300d1c794ec+ other rev@49b6d8032493 ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
@@ -722,15 +809,21 @@
    ancestor: 924404dff337, local: 62e7bf090eba+, remote: f4db7e329e71
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: local copied/moved from a -> m
+   b: local copied/moved from a -> m (premerge)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b and a to b
   my b@62e7bf090eba+ other a@f4db7e329e71 ancestor a@924404dff337
+   rev: versions differ -> m (premerge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  merging rev
+  my rev@62e7bf090eba+ other rev@f4db7e329e71 ancestor rev@924404dff337
+   b: local copied/moved from a -> m (merge)
+  picked tool 'python ../merge' for b (binary False symlink False)
+  my b@62e7bf090eba+ other a@f4db7e329e71 ancestor a@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
   merge tool returned: 0
-   rev: versions differ -> m
+   rev: versions differ -> m (merge)
   picked tool 'python ../merge' for rev (binary False symlink False)
-  merging rev
   my rev@62e7bf090eba+ other rev@f4db7e329e71 ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
@@ -764,15 +857,18 @@
    preserving rev for resolve of rev
    c: remote created -> g
   getting c
-   b: local copied/moved from a -> m
+   b: local copied/moved from a -> m (premerge)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b and a to b
   my b@02963e448370+ other a@2b958612230f ancestor a@924404dff337
    premerge successful
-   rev: versions differ -> m
+   rev: versions differ -> m (premerge)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@02963e448370+ other rev@2b958612230f ancestor rev@924404dff337
+   rev: versions differ -> m (merge)
+  picked tool 'python ../merge' for rev (binary False symlink False)
+  my rev@02963e448370+ other rev@2b958612230f ancestor rev@924404dff337
   launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
   merge tool returned: 0
   1 files updated, 2 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-requires.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-requires.t	Tue Oct 20 15:59:10 2015 -0500
@@ -10,12 +10,12 @@
   $ echo indoor-pool > .hg/requires
   $ hg tip
   abort: repository requires features unknown to this Mercurial: indoor-pool!
-  (see http://mercurial.selenic.com/wiki/MissingRequirement for more information)
+  (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
   $ echo outdoor-pool >> .hg/requires
   $ hg tip
   abort: repository requires features unknown to this Mercurial: indoor-pool outdoor-pool!
-  (see http://mercurial.selenic.com/wiki/MissingRequirement for more information)
+  (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
   $ cd ..
 
@@ -63,7 +63,7 @@
 
   $ hg clone supported clone-dst
   abort: repository requires features unknown to this Mercurial: featuresetup-test!
-  (see http://mercurial.selenic.com/wiki/MissingRequirement for more information)
+  (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
   $ hg clone --pull supported clone-dst
   abort: required features are not supported in the destination: featuresetup-test
--- a/tests/test-resolve.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-resolve.t	Tue Oct 20 15:59:10 2015 -0500
@@ -53,6 +53,58 @@
   arguments do not match paths that need resolving
   $ hg resolve -l does-not-exist
 
+don't allow marking or unmarking driver-resolved files
+
+  $ cat > $TESTTMP/markdriver.py << EOF
+  > '''mark and unmark files as driver-resolved'''
+  > from mercurial import cmdutil, merge, scmutil
+  > cmdtable = {}
+  > command = cmdutil.command(cmdtable)
+  > @command('markdriver',
+  >   [('u', 'unmark', None, '')],
+  >   'FILE...')
+  > def markdriver(ui, repo, *pats, **opts):
+  >     wlock = repo.wlock()
+  >     try:
+  >         ms = merge.mergestate(repo)
+  >         m = scmutil.match(repo[None], pats, opts)
+  >         for f in ms:
+  >             if not m(f):
+  >                 continue
+  >             if not opts['unmark']:
+  >                 ms.mark(f, 'd')
+  >             else:
+  >                 ms.mark(f, 'u')
+  >         ms.commit()
+  >     finally:
+  >         wlock.release()
+  > EOF
+  $ hg --config extensions.markdriver=$TESTTMP/markdriver.py markdriver file1
+  $ hg resolve --list
+  D file1
+  U file2
+  $ hg resolve --mark file1
+  not marking file1 as it is driver-resolved
+this should not print out file1
+  $ hg resolve --mark --all
+  (no more unresolved files -- run "hg resolve --all" to conclude)
+  $ hg resolve --mark 'glob:file*'
+  (no more unresolved files -- run "hg resolve --all" to conclude)
+  $ hg resolve --list
+  D file1
+  R file2
+  $ hg resolve --unmark file1
+  not unmarking file1 as it is driver-resolved
+  (no more unresolved files -- run "hg resolve --all" to conclude)
+  $ hg resolve --unmark --all
+  $ hg resolve --list
+  D file1
+  U file2
+  $ hg --config extensions.markdriver=$TESTTMP/markdriver.py markdriver --unmark file1
+  $ hg resolve --list
+  U file1
+  U file2
+
 resolve the failure
 
   $ echo resolved > file1
@@ -129,24 +181,29 @@
 resolve without arguments should suggest --all
   $ hg resolve
   abort: no files or directories specified
-  (use --all to remerge all files)
+  (use --all to re-merge all unresolved files)
   [255]
 
 resolve --all should re-merge all unresolved files
-  $ hg resolve -q --all
-  warning: conflicts during merge.
-  merging file1 incomplete! (edit conflicts, then use 'hg resolve --mark')
-  warning: conflicts during merge.
-  merging file2 incomplete! (edit conflicts, then use 'hg resolve --mark')
+  $ hg resolve --all
+  merging file1
+  merging file2
+  warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
+  warning: conflicts while merging file2! (edit, then use 'hg resolve --mark')
   [1]
+  $ cat file1.orig
+  foo
+  baz
+  $ cat file2.orig
+  foo
+  baz
   $ grep '<<<' file1 > /dev/null
   $ grep '<<<' file2 > /dev/null
 
 resolve <file> should re-merge file
   $ echo resolved > file1
   $ hg resolve -q file1
-  warning: conflicts during merge.
-  merging file1 incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
   [1]
   $ grep '<<<' file1 > /dev/null
 
--- a/tests/test-revset.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-revset.t	Tue Oct 20 15:59:10 2015 -0500
@@ -141,7 +141,7 @@
     ('symbol', '3')
     ('symbol', '6'))
   * set:
-  <baseset [3, 5, 6]>
+  <baseset+ [3, 5, 6]>
   3
   5
   6
@@ -197,11 +197,53 @@
   <filteredset
     <baseset [7]>>
   7
-  $ try -- '-a-b-c-' # complains
-  hg: parse error at 7: not a prefix: end
-  [255]
-  $ log -a-b-c- # succeeds with fallback
+
+names that should be caught by the fallback mechanism
+
+  $ try -- '-a-b-c-'
+  ('symbol', '-a-b-c-')
+  * set:
+  <baseset [4]>
+  4
+  $ log -a-b-c-
+  4
+  $ try '+a+b+c+'
+  ('symbol', '+a+b+c+')
+  * set:
+  <baseset [3]>
+  3
+  $ try '+a+b+c+:'
+  (rangepost
+    ('symbol', '+a+b+c+'))
+  * set:
+  <spanset+ 3:9>
+  3
   4
+  5
+  6
+  7
+  8
+  9
+  $ try ':+a+b+c+'
+  (rangepre
+    ('symbol', '+a+b+c+'))
+  * set:
+  <spanset+ 0:3>
+  0
+  1
+  2
+  3
+  $ try -- '-a-b-c-:+a+b+c+'
+  (range
+    ('symbol', '-a-b-c-')
+    ('symbol', '+a+b+c+'))
+  * set:
+  <spanset- 3:4>
+  4
+  3
+  $ log '-a-b-c-:+a+b+c+'
+  4
+  3
 
   $ try -- -a-b-c--a # complains
   (minus
@@ -311,6 +353,9 @@
   $ log 'date('
   hg: parse error at 5: not a prefix: end
   [255]
+  $ log 'date("\xy")'
+  hg: parse error: invalid \x escape
+  [255]
   $ log 'date(tip)'
   abort: invalid date: 'tip'
   [255]
@@ -521,6 +566,16 @@
   $ log 'keyword("test a")'
   $ log 'limit(head(), 1)'
   0
+  $ log 'limit(author("re:bob|test"), 3, 5)'
+  5
+  6
+  7
+  $ log 'limit(author("re:bob|test"), offset=6)'
+  6
+  $ log 'limit(author("re:bob|test"), offset=10)'
+  $ log 'limit(all(), 1, -1)'
+  hg: parse error: negative offset
+  [255]
   $ log 'matching(6)'
   6
   $ log 'matching(6:7, "phase parents user date branch summary files description substate")'
@@ -949,7 +1004,7 @@
       ('symbol', '4')))
   * set:
   <addset
-    <baseset [5, 3, 1]>,
+    <baseset- [1, 3, 5]>,
     <generatorset+>>
   5
   3
@@ -972,7 +1027,7 @@
   * set:
   <addset+
     <generatorset+>,
-    <baseset [5, 3, 1]>>
+    <baseset- [1, 3, 5]>>
   0
   1
   2
@@ -1283,6 +1338,9 @@
   $ log 'branch(unknown)'
   abort: unknown revision 'unknown'!
   [255]
+  $ log 'branch("literal:unknown")'
+  abort: branch 'unknown' does not exist!
+  [255]
   $ log 'branch("re:unknown")'
   $ log 'present(branch("unknown"))'
   $ log 'present(branch("re:unknown"))'
@@ -1473,10 +1531,16 @@
 (single rev)
 
   $ hg diff -r 'tip^' -r 'tip^'
-  $ hg diff -r 'tip^::tip^ or tip^'
+  $ hg diff -r 'tip^:tip^'
 
 (single rev that does not look like a range)
 
+  $ hg diff -r 'tip^::tip^ or tip^'
+  diff -r d5d0dcbdc4d9 .hgtags
+  --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  +++ b/.hgtags	* (glob)
+  @@ -0,0 +1,1 @@
+  +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
   $ hg diff -r 'tip^ or tip^'
   diff -r d5d0dcbdc4d9 .hgtags
   --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
--- a/tests/test-run-tests.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-run-tests.t	Tue Oct 20 15:59:10 2015 -0500
@@ -393,6 +393,14 @@
   python hash seed: * (glob)
   [1]
 
+test --tmpdir support
+  $ run-tests.py --with-hg=`which hg` --tmpdir=$TESTTMP/keep test-success.t
+  
+  Keeping testtmp dir: $TESTTMP/keep/child1/test-success.t (glob)
+  Keeping threadtmp dir: $TESTTMP/keep/child1  (glob)
+  .
+  # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+
 test for --time
 ==================
 
@@ -580,7 +588,7 @@
   >   $ echo foo
   >   foo
   > EOF
-  $ run-tests.py test-hghave.t
+  $ run-tests.py $HGTEST_RUN_TESTS_PURE test-hghave.t
   .
   # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
 
@@ -599,7 +607,7 @@
   >   #
   >   # check-code - a style and portability checker for Mercurial
   > EOF
-  $ run-tests.py test-runtestdir.t
+  $ run-tests.py $HGTEST_RUN_TESTS_PURE test-runtestdir.t
   .
   # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
 
@@ -616,8 +624,22 @@
   >   $ custom-command.sh
   >   hello world
   > EOF
-  $ run-tests.py test-testdir-path.t
+  $ run-tests.py $HGTEST_RUN_TESTS_PURE test-testdir-path.t
   .
   # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
 
 #endif
+
+test support for --allow-slow-tests
+  $ cat > test-very-slow-test.t <<EOF
+  > #require slow
+  >   $ echo pass
+  >   pass
+  > EOF
+  $ run-tests.py $HGTEST_RUN_TESTS_PURE test-very-slow-test.t
+  s
+  Skipped test-very-slow-test.t: skipped
+  # Ran 0 tests, 1 skipped, 0 warned, 0 failed.
+  $ run-tests.py $HGTEST_RUN_TESTS_PURE --allow-slow-tests test-very-slow-test.t
+  .
+  # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
--- a/tests/test-setdiscovery.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-setdiscovery.t	Tue Oct 20 15:59:10 2015 -0500
@@ -350,7 +350,7 @@
   $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
   "GET /?cmd=capabilities HTTP/1.1" 200 -
   "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db
-  "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477
+  "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477
   "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
   $ cat errors.log
 
--- a/tests/test-shelve.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-shelve.t	Tue Oct 20 15:59:10 2015 -0500
@@ -301,8 +301,7 @@
   rebasing shelved changes
   rebasing 5:4702e8911fe0 "changes to '[mq]: second.patch'" (tip)
   merging a/a
-  warning: conflicts during merge.
-  merging a/a incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging a/a! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
   [1]
 
@@ -382,8 +381,7 @@
 redo the unshelve to get a conflict
 
   $ hg unshelve -q
-  warning: conflicts during merge.
-  merging a/a incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging a/a! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
   [1]
 
@@ -534,8 +532,12 @@
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ hg --config extensions.mq=! shelve --list
   test            (*)    changes to 'create conflict' (glob)
+  $ hg bookmark
+   * test                      4:33f7f61e6c5e
   $ hg --config extensions.mq=! unshelve
   unshelving change 'test'
+  $ hg bookmark
+   * test                      4:33f7f61e6c5e
 
 shelve should leave dirstate clean (issue4055)
 
@@ -703,8 +705,7 @@
   rebasing shelved changes
   rebasing 5:23b29cada8ba "changes to 'commit stuff'" (tip)
   merging f
-  warning: conflicts during merge.
-  merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
   [1]
   $ hg log -G --template '{rev}  {desc|firstline}  {author}  {date|isodate}'
@@ -759,8 +760,7 @@
   rebasing shelved changes
   rebasing 5:23b29cada8ba "changes to 'commit stuff'" (tip)
   merging f
-  warning: conflicts during merge.
-  merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
   [1]
   $ hg st
@@ -796,15 +796,18 @@
   $ hg up test
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (activating bookmark test)
+  $ hg bookmark
+   * test                      4:33f7f61e6c5e
   $ hg unshelve
   unshelving change 'default'
   rebasing shelved changes
   rebasing 5:4b555fdb4e96 "changes to 'second'" (tip)
   merging a/a
-  warning: conflicts during merge.
-  merging a/a incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging a/a! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
   [1]
+  $ hg bookmark
+     test                      4:33f7f61e6c5e
 
 Test that resolving all conflicts in one direction (so that the rebase
 is a no-op), works (issue4398)
@@ -817,6 +820,8 @@
   rebasing 5:4b555fdb4e96 "changes to 'second'" (tip)
   note: rebase of 5:4b555fdb4e96 created no changes to commit
   unshelve of 'default' complete
+  $ hg bookmark
+   * test                      4:33f7f61e6c5e
   $ hg diff
   $ hg status
   ? a/a.orig
@@ -900,12 +905,16 @@
   $ hg st
   M a/a
   ? foo/foo
+  $ hg bookmark
+   * test                      4:33f7f61e6c5e
   $ hg unshelve
   unshelving change 'test'
   temporarily committing pending changes (restore with 'hg unshelve --abort')
   rebasing shelved changes
   rebasing 6:65b5d1c34c34 "changes to 'create conflict'" (tip)
   merging a/a
+  $ hg bookmark
+   * test                      4:33f7f61e6c5e
   $ cat a/a
   a
   a
@@ -917,6 +926,7 @@
 
   $ hg up --clean .
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (leaving bookmark test)
   $ hg shelve --list
   $ echo 'patch a' > shelf-patch-a
   $ hg add shelf-patch-a
@@ -954,3 +964,213 @@
   abort: cannot find shelf nonexistentshelf
   [255]
 
+  $ cd ..
+
+Shelve from general delta repo uses bundle2 on disk
+--------------------------------------------------
+
+no general delta
+
+  $ hg clone --pull repo bundle1 --config format.generaldelta=0
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 5 changesets with 8 changes to 6 files
+  updating to branch default
+  6 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd bundle1
+  $ echo babar > jungle
+  $ hg add jungle
+  $ hg shelve
+  shelved as default
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ hg debugbundle .hg/shelved/*.hg
+  7e30d8ac6f23cfc84330fd7e698730374615d21a
+  $ cd ..
+
+with general delta
+
+  $ hg clone --pull repo bundle2 --config format.generaldelta=1
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 5 changesets with 8 changes to 6 files
+  updating to branch default
+  6 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd bundle2
+  $ echo babar > jungle
+  $ hg add jungle
+  $ hg shelve
+  shelved as default
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ hg debugbundle .hg/shelved/*.hg
+  Stream params: {'Compression': 'BZ'}
+  changegroup -- "{'version': '02'}"
+      7e30d8ac6f23cfc84330fd7e698730374615d21a
+  $ cd ..
+
+Test visibility of in-memory changes inside transaction to external hook
+------------------------------------------------------------------------
+
+  $ cd repo
+
+  $ echo xxxx >> x
+  $ hg commit -m "#5: changes to invoke rebase"
+
+  $ cat > $TESTTMP/checkvisibility.sh <<EOF
+  > echo "==== \$1:"
+  > hg parents --template "VISIBLE {rev}:{node|short}\n"
+  > # test that pending changes are hidden
+  > unset HG_PENDING
+  > hg parents --template "ACTUAL  {rev}:{node|short}\n"
+  > echo "===="
+  > EOF
+
+  $ cat >> .hg/hgrc <<EOF
+  > [defaults]
+  > # to fix hash id of temporary revisions
+  > unshelve = --date '0 0'
+  > EOF
+
+"hg unshelve" at REV5 implies steps below:
+
+(1) commit changes in the working directory (REV6)
+(2) unbundle shelved revision (REV7)
+(3) rebase: merge REV7 into REV6 (REV6 => REV6, REV7)
+(4) rebase: commit merged revision (REV8)
+(5) rebase: update to REV6 (REV8 => REV6)
+(6) update to REV5 (REV6 => REV5)
+(7) abort transaction
+
+== test visibility to external preupdate hook
+
+  $ cat >> .hg/hgrc <<EOF
+  > [hooks]
+  > preupdate.visibility = sh $TESTTMP/checkvisibility.sh preupdate
+  > EOF
+
+  $ echo nnnn >> n
+
+  $ sh $TESTTMP/checkvisibility.sh before-unshelving
+  ==== before-unshelving:
+  VISIBLE 5:703117a2acfb
+  ACTUAL  5:703117a2acfb
+  ====
+
+  $ hg unshelve --keep default
+  temporarily committing pending changes (restore with 'hg unshelve --abort')
+  rebasing shelved changes
+  rebasing 7:fcbb97608399 "changes to 'create conflict'" (tip)
+  ==== preupdate:
+  VISIBLE 6:66b86db80ee4
+  ACTUAL  5:703117a2acfb
+  ====
+  ==== preupdate:
+  VISIBLE 8:cb2a4e59c2d5
+  ACTUAL  5:703117a2acfb
+  ====
+  ==== preupdate:
+  VISIBLE 6:66b86db80ee4
+  ACTUAL  5:703117a2acfb
+  ====
+
+  $ cat >> .hg/hgrc <<EOF
+  > [hooks]
+  > preupdate.visibility =
+  > EOF
+
+  $ sh $TESTTMP/checkvisibility.sh after-unshelving
+  ==== after-unshelving:
+  VISIBLE 5:703117a2acfb
+  ACTUAL  5:703117a2acfb
+  ====
+
+== test visibility to external update hook
+
+  $ hg update -q -C 5
+
+  $ cat >> .hg/hgrc <<EOF
+  > [hooks]
+  > update.visibility = sh $TESTTMP/checkvisibility.sh update
+  > EOF
+
+  $ echo nnnn >> n
+
+  $ sh $TESTTMP/checkvisibility.sh before-unshelving
+  ==== before-unshelving:
+  VISIBLE 5:703117a2acfb
+  ACTUAL  5:703117a2acfb
+  ====
+
+  $ hg unshelve --keep default
+  temporarily committing pending changes (restore with 'hg unshelve --abort')
+  rebasing shelved changes
+  rebasing 7:fcbb97608399 "changes to 'create conflict'" (tip)
+  ==== update:
+  VISIBLE 6:66b86db80ee4
+  VISIBLE 7:fcbb97608399
+  ACTUAL  5:703117a2acfb
+  ====
+  ==== update:
+  VISIBLE 6:66b86db80ee4
+  ACTUAL  5:703117a2acfb
+  ====
+  ==== update:
+  VISIBLE 5:703117a2acfb
+  ACTUAL  5:703117a2acfb
+  ====
+
+  $ cat >> .hg/hgrc <<EOF
+  > [hooks]
+  > update.visibility =
+  > EOF
+
+  $ sh $TESTTMP/checkvisibility.sh after-unshelving
+  ==== after-unshelving:
+  VISIBLE 5:703117a2acfb
+  ACTUAL  5:703117a2acfb
+  ====
+
+  $ cd ..
+
+Test that aborting an unshelve always gets the user out of the unshelved state
+-------------------------------------------------------------------------------
+  $ hg init salvage
+  $ cd salvage
+  $ echo 'content' > root
+  $ hg commit -A -m 'root' -q
+  $ echo '' > root
+  $ hg shelve -q
+  $ echo 'contADDent' > root
+  $ hg unshelve -q
+  warning: conflicts while merging root! (edit, then use 'hg resolve --mark')
+  unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
+  [1]
+Wreak havoc on the unshelve process
+  $ rm .hg/unshelverebasestate
+  $ hg unshelve --abort
+  unshelve of 'default' aborted
+  abort: No such file or directory
+  [255]
+Can the user leave the current state?
+  $ hg up -C .
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Try again but with a corrupted shelve state file
+  $ hg strip -r 2 -r 1 -q
+  $ hg up -r 0 -q
+  $ echo '' > root
+  $ hg shelve -q
+  $ echo 'contADDent' > root
+  $ hg unshelve -q
+  warning: conflicts while merging root! (edit, then use 'hg resolve --mark')
+  unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
+  [1]
+  $ sed 's/ae8c668541e8/123456789012/' .hg/shelvedstate > ../corrupt-shelvedstate
+  $ mv ../corrupt-shelvedstate .hg/histedit-state
+  $ hg unshelve --abort 2>&1 | grep 'rebase aborted'
+  rebase aborted
+  $ hg up -C .
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-simplemerge.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-simplemerge.py	Tue Oct 20 15:59:10 2015 -0500
@@ -15,7 +15,7 @@
 
 import unittest
 from unittest import TestCase
-from mercurial import util, simplemerge
+from mercurial import util, simplemerge, error
 
 # bzr compatible interface, for the tests
 class Merge3(simplemerge.Merge3Text):
@@ -29,7 +29,7 @@
         atext = '\n'.join([i.strip('\n') for i in a] + [''])
         btext = '\n'.join([i.strip('\n') for i in b] + [''])
         if util.binary(basetext) or util.binary(atext) or util.binary(btext):
-            raise util.Abort("don't know how to merge binary files")
+            raise error.Abort("don't know how to merge binary files")
         simplemerge.Merge3Text.__init__(self, basetext, atext, btext,
                                         base, a, b)
 
@@ -321,7 +321,7 @@
         self.assertEquals(ml, MERGED_RESULT)
 
     def test_binary(self):
-        self.assertRaises(util.Abort, Merge3, ['\x00'], ['a'], ['b'])
+        self.assertRaises(error.Abort, Merge3, ['\x00'], ['a'], ['b'])
 
     def test_dos_text(self):
         base_text = 'a\r\n'
--- a/tests/test-ssh-bundle1.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-ssh-bundle1.t	Tue Oct 20 15:59:10 2015 -0500
@@ -43,14 +43,14 @@
 repo not found error
 
   $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
-  remote: abort: there is no Mercurial repository here (.hg not found)!
+  remote: abort: repository nonexistent not found!
   abort: no suitable response from remote hg!
   [255]
 
 non-existent absolute path
 
   $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy//`pwd`/nonexistent local
-  remote: abort: there is no Mercurial repository here (.hg not found)!
+  remote: abort: repository /$TESTTMP/nonexistent not found!
   abort: no suitable response from remote hg!
   [255]
 
@@ -128,7 +128,7 @@
 
   $ hg pull -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/doesnotexist
   pulling from ssh://user@dummy/doesnotexist
-  remote: abort: there is no Mercurial repository here (.hg not found)!
+  remote: abort: repository doesnotexist not found!
   abort: no suitable response from remote hg!
   [255]
 
--- a/tests/test-ssh.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-ssh.t	Tue Oct 20 15:59:10 2015 -0500
@@ -34,14 +34,14 @@
 repo not found error
 
   $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
-  remote: abort: there is no Mercurial repository here (.hg not found)!
+  remote: abort: repository nonexistent not found!
   abort: no suitable response from remote hg!
   [255]
 
 non-existent absolute path
 
-  $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy//`pwd`/nonexistent local
-  remote: abort: there is no Mercurial repository here (.hg not found)!
+  $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/`pwd`/nonexistent local
+  remote: abort: repository $TESTTMP/nonexistent not found!
   abort: no suitable response from remote hg!
   [255]
 
@@ -119,7 +119,7 @@
 
   $ hg pull -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/doesnotexist
   pulling from ssh://user@dummy/doesnotexist
-  remote: abort: there is no Mercurial repository here (.hg not found)!
+  remote: abort: repository doesnotexist not found!
   abort: no suitable response from remote hg!
   [255]
 
@@ -471,7 +471,7 @@
 
   $ cat dummylog
   Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
-  Got arguments 1:user@dummy 2:hg -R /$TESTTMP/nonexistent serve --stdio
+  Got arguments 1:user@dummy 2:hg -R $TESTTMP/nonexistent serve --stdio
   Got arguments 1:user@dummy 2:hg -R remote serve --stdio
   Got arguments 1:user@dummy 2:hg -R local-stream serve --stdio
   Got arguments 1:user@dummy 2:hg -R remote serve --stdio
--- a/tests/test-status-color.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-status-color.t	Tue Oct 20 15:59:10 2015 -0500
@@ -325,11 +325,9 @@
   created new head
   $ hg merge
   merging a
-  warning: conflicts during merge.
-  merging a incomplete! (edit conflicts, then use 'hg resolve --mark')
   merging b
-  warning: conflicts during merge.
-  merging b incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+  warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 2 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
   [1]
--- a/tests/test-strip.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-strip.t	Tue Oct 20 15:59:10 2015 -0500
@@ -197,17 +197,8 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     c
   
-  $ hg --config experimental.bundle2-exp=True --config experimental.strip-bundle2-version=INVALID strip 4
-  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  unknown strip-bundle2-version value 'INVALID'; should be one of ['01', '02']
-  saved backup bundle to $TESTTMP/test/.hg/strip-backup/264128213d29-0b39d6bf-backup.hg (glob)
-  $ hg debugbundle .hg/strip-backup/*
-  264128213d290d868c54642d13aeaa3675551a78
-  $ restore
 
-  $ hg up -C 4
-  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  $ hg --config experimental.bundle2-exp=True --config experimental.strip-bundle2-version=02 --traceback strip 4
+  $ hg --traceback strip 4
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   saved backup bundle to $TESTTMP/test/.hg/strip-backup/264128213d29-0b39d6bf-backup.hg (glob)
   $ hg parents
@@ -217,35 +208,7 @@
   summary:     b
   
   $ hg debugbundle .hg/strip-backup/*
-  Stream params: {}
-  changegroup -- "{'version': '02'}"
-      264128213d290d868c54642d13aeaa3675551a78
-  $ hg incoming .hg/strip-backup/*
-  comparing with .hg/strip-backup/264128213d29-0b39d6bf-backup.hg
-  searching for changes
-  changeset:   4:264128213d29
-  tag:         tip
-  parent:      1:ef3a871183d7
-  user:        test
-  date:        Thu Jan 01 00:00:00 1970 +0000
-  summary:     c
-  
-  $ restore
-  $ hg up -C 4
-  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  $ hg --config experimental.bundle2-exp=True --config experimental.strip-bundle2-version=02 --traceback strip 4
-  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  saved backup bundle to $TESTTMP/test/.hg/strip-backup/264128213d29-0b39d6bf-backup.hg (glob)
-  $ hg parents
-  changeset:   1:ef3a871183d7
-  user:        test
-  date:        Thu Jan 01 00:00:00 1970 +0000
-  summary:     b
-  
-  $ hg debugbundle .hg/strip-backup/*
-  Stream params: {}
-  changegroup -- "{'version': '02'}"
-      264128213d290d868c54642d13aeaa3675551a78
+  264128213d290d868c54642d13aeaa3675551a78
   $ hg pull .hg/strip-backup/*
   pulling from .hg/strip-backup/264128213d29-0b39d6bf-backup.hg
   searching for changes
@@ -468,9 +431,9 @@
 applied patches before strip
 
   $ hg qapplied
-  2.diff
-  3.diff
-  4.diff
+  d
+  e
+  f
 
 stripping revision in queue
 
@@ -481,7 +444,7 @@
 applied patches after stripping rev in queue
 
   $ hg qapplied
-  2.diff
+  d
 
 stripping ancestor of queue
 
@@ -697,7 +660,7 @@
 Test that we only bundle the stripped changesets (issue4736)
 ------------------------------------------------------------
 
-initialisation (previous repo is empty anyway)
+initialization (previous repo is empty anyway)
 
   $ hg init issue4736
   $ cd issue4736
@@ -844,7 +807,7 @@
   > EOF
   $ hg strip tip --config extensions.crash=$TESTTMP/crashstrip.py
   saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/5c51d8d6557d-70daef06-backup.hg (glob)
-  strip failed, full bundle stored in '$TESTTMP/issue4736/.hg/strip-backup/5c51d8d6557d-70daef06-backup.hg'
+  strip failed, full bundle stored in '$TESTTMP/issue4736/.hg/strip-backup/5c51d8d6557d-70daef06-backup.hg' (glob)
   abort: boom
   [255]
 
--- a/tests/test-subrepo-deep-nested-change.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-subrepo-deep-nested-change.t	Tue Oct 20 15:59:10 2015 -0500
@@ -466,7 +466,7 @@
 The local repo enables largefiles if a largefiles repo is cloned
   $ hg showconfig extensions
   abort: repository requires features unknown to this Mercurial: largefiles!
-  (see http://mercurial.selenic.com/wiki/MissingRequirement for more information)
+  (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
   $ hg --config extensions.largefiles= clone -qU . ../lfclone
   $ cat ../lfclone/.hg/hgrc
--- a/tests/test-subrepo-svn.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-subrepo-svn.t	Tue Oct 20 15:59:10 2015 -0500
@@ -7,6 +7,10 @@
   $ SVNREPOURL=file://`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
 #endif
 
+  $ filter_svn_output () {
+  >     egrep -v 'Committing|Transmitting|Updating' || true
+  > }
+
 create subversion repo
 
   $ WCROOT="`pwd`/svn-wc"
@@ -24,21 +28,12 @@
   $ svn add externals
   A         externals
   A         externals/other (glob)
-  $ svn ci -m 'Add alpha'
-  Adding         externals
-  Adding         externals/other (glob)
-  Adding         src
-  Adding         src/alpha (glob)
-  Transmitting file data ..
-  Committed revision 1.
+  $ svn ci -qm 'Add alpha'
   $ svn up -q
   $ echo "externals -r1 $SVNREPOURL/externals" > extdef
   $ svn propset -F extdef svn:externals src
   property 'svn:externals' set on 'src'
-  $ svn ci -m 'Setting externals'
-  Sending        src
-  
-  Committed revision 2.
+  $ svn ci -qm 'Setting externals'
   $ cd ..
 
 create hg repo
@@ -98,10 +93,9 @@
   commit: 1 modified, 1 subrepos
   update: (current)
   phases: 2 draft
-  $ hg commit --subrepos -m 'Message!' | grep -v Updating
+  $ hg commit --subrepos -m 'Message!' | filter_svn_output
   committing subrepository s
   Sending*s/alpha (glob)
-  Transmitting file data .
   Committed revision 3.
   
   Fetching external item into '*s/externals'* (glob)
@@ -128,9 +122,7 @@
 add an unrelated revision in svn and update the subrepo without
 bringing any changes.
 
-  $ svn mkdir "$SVNREPOURL/unrelated" -m 'create unrelated'
-  
-  Committed revision 4.
+  $ svn mkdir "$SVNREPOURL/unrelated" -qm 'create unrelated'
   $ svn up -q s
   $ hg sum
   parent: 2:* tip (glob)
@@ -153,10 +145,7 @@
   $ echo xyz >> alpha
   $ svn propset svn:mime-type 'text/xml' alpha
   property 'svn:mime-type' set on 'alpha'
-  $ svn ci -m 'amend a from svn'
-  Sending        *alpha (glob)
-  Transmitting file data .
-  Committed revision 5.
+  $ svn ci -qm 'amend a from svn'
   $ cd ../../sub/t
 
 this commit from hg will fail
@@ -429,11 +418,7 @@
   $ svn add dir
   A         dir
   A         dir/epsilon.py (glob)
-  $ svn ci -m 'Add dir/epsilon.py'
-  Adding         *dir (glob)
-  Adding         *dir/epsilon.py (glob)
-  Transmitting file data .
-  Committed revision 6.
+  $ svn ci -qm 'Add dir/epsilon.py'
   $ cd ../..
   $ hg init rebaserepo
   $ cd rebaserepo
@@ -495,41 +480,26 @@
 Point to a Subversion branch which has since been deleted and recreated
 First, create that condition in the repository.
 
-  $ hg ci --subrepos -m cleanup | grep -v Updating
+  $ hg ci --subrepos -m cleanup | filter_svn_output
   committing subrepository obstruct
   Sending        obstruct/other (glob)
-  Transmitting file data .
   Committed revision 7.
   At revision 7.
-  $ svn mkdir -m "baseline" $SVNREPOURL/trunk
-  
-  Committed revision 8.
-  $ svn copy -m "initial branch" $SVNREPOURL/trunk $SVNREPOURL/branch
-  
-  Committed revision 9.
+  $ svn mkdir -qm "baseline" $SVNREPOURL/trunk
+  $ svn copy -qm "initial branch" $SVNREPOURL/trunk $SVNREPOURL/branch
   $ svn co --quiet "$SVNREPOURL"/branch tempwc
   $ cd tempwc
   $ echo "something old" > somethingold
   $ svn add somethingold
   A         somethingold
-  $ svn ci -m 'Something old'
-  Adding         somethingold
-  Transmitting file data .
-  Committed revision 10.
-  $ svn rm -m "remove branch" $SVNREPOURL/branch
-  
-  Committed revision 11.
-  $ svn copy -m "recreate branch" $SVNREPOURL/trunk $SVNREPOURL/branch
-  
-  Committed revision 12.
+  $ svn ci -qm 'Something old'
+  $ svn rm -qm "remove branch" $SVNREPOURL/branch
+  $ svn copy -qm "recreate branch" $SVNREPOURL/trunk $SVNREPOURL/branch
   $ svn up -q
   $ echo "something new" > somethingnew
   $ svn add somethingnew
   A         somethingnew
-  $ svn ci -m 'Something new'
-  Adding         somethingnew
-  Transmitting file data .
-  Committed revision 13.
+  $ svn ci -qm 'Something new'
   $ cd ..
   $ rm -rf tempwc
   $ svn co "$SVNREPOURL/branch"@10 recreated
@@ -610,15 +580,8 @@
   A         trunk/subdir (glob)
   A         trunk/subdir/a (glob)
   A         branches
-  $ svn ci -m addsubdir
-  Adding         branches
-  Adding         trunk/subdir (glob)
-  Adding         trunk/subdir/a (glob)
-  Transmitting file data .
-  Committed revision 14.
-  $ svn cp -m branchtrunk $SVNREPOURL/trunk $SVNREPOURL/branches/somebranch
-  
-  Committed revision 15.
+  $ svn ci -qm addsubdir
+  $ svn cp -qm branchtrunk $SVNREPOURL/trunk $SVNREPOURL/branches/somebranch
   $ cd ..
 
   $ hg init repo2
@@ -652,14 +615,7 @@
   A         sub
   A         sub/.hg (glob)
   A         sub/.hg/hgrc (glob)
-  $ svn ci -m 'add .hg/hgrc to be sanitized at hg update'
-  Adding         .hg
-  Adding         .hg/hgrc (glob)
-  Adding         sub
-  Adding         sub/.hg (glob)
-  Adding         sub/.hg/hgrc (glob)
-  Transmitting file data ..
-  Committed revision 16.
+  $ svn ci -qm 'add .hg/hgrc to be sanitized at hg update'
   $ svn up -q
   $ cd ..
   $ hg commit -S -m 'commit with svn revision including .hg/hgrc'
--- a/tests/test-subrepo.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-subrepo.t	Tue Oct 20 15:59:10 2015 -0500
@@ -259,7 +259,7 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 1f14a2e2d3ec, local: f0d2028bf86d+, remote: 1831e14459c4
-   .hgsubstate: versions differ -> m
+   .hgsubstate: versions differ -> m (premerge)
   subrepo merge f0d2028bf86d+ 1831e14459c4 1f14a2e2d3ec
     subrepo t: other changed, get t:6747d179aa9a688023c4b0cad32e4c92bb7f34ad:hg
   getting subrepo t
@@ -285,7 +285,7 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 1831e14459c4, local: e45c8b14af55+, remote: f94576341bcf
-   .hgsubstate: versions differ -> m
+   .hgsubstate: versions differ -> m (premerge)
   subrepo merge e45c8b14af55+ f94576341bcf 1831e14459c4
     subrepo t: both sides changed 
    subrepository t diverged (local revision: 20a0db6fbf6c, remote revision: 7af322bc1198)
@@ -296,12 +296,14 @@
    branchmerge: True, force: False, partial: False
    ancestor: 6747d179aa9a, local: 20a0db6fbf6c+, remote: 7af322bc1198
    preserving t for resolve of t
-   t: versions differ -> m
-  picked tool 'internal:merge' for t (binary False symlink False)
+   t: versions differ -> m (premerge)
+  picked tool ':merge' for t (binary False symlink False)
   merging t
   my t@20a0db6fbf6c+ other t@7af322bc1198 ancestor t@6747d179aa9a
-  warning: conflicts during merge.
-  merging t incomplete! (edit conflicts, then use 'hg resolve --mark')
+   t: versions differ -> m (merge)
+  picked tool ':merge' for t (binary False symlink False)
+  my t@20a0db6fbf6c+ other t@7af322bc1198 ancestor t@6747d179aa9a
+  warning: conflicts while merging t! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
     subrepo t: merge with t:7af322bc1198a32402fe903e0b7ebcfc5c9bf8f4:hg
@@ -1028,8 +1030,8 @@
 
 Ensure a full traceback, not just the SubrepoAbort part
 
-  $ hg -R issue1852b update --traceback 2>&1 | grep 'raise util\.Abort'
-      raise util.Abort(_("default path for subrepository not found"))
+  $ hg -R issue1852b update --traceback 2>&1 | grep 'raise error\.Abort'
+      raise error.Abort(_("default path for subrepository not found"))
 
 Pull -u now doesn't help
 
--- a/tests/test-tag.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-tag.t	Tue Oct 20 15:59:10 2015 -0500
@@ -461,6 +461,13 @@
   $ hg log -r '.' -T "{latesttag % '{latesttag}\n'}"
   t4
   t6
+  $ hg log -r '.' -T "{latesttag('t4') % 'T: {tag}, C: {changes}, D: {distance}\n'}"
+  T: t4, C: 2, D: 2
+  $ hg log -r '.' -T "{latesttag('re:\d') % 'T: {tag}, C: {changes}, D: {distance}\n'}"
+  T: t4, C: 2, D: 2
+  T: t6, C: 2, D: 2
+  $ hg log -r . -T '{join(latesttag(), "*")}\n'
+  t4*t6
   $ hg ci -A -m4
   adding f4
   $ hg log -r 'wdir()' -T "{changessincelatesttag} changes since {latesttag}\n"
--- a/tests/test-tags.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-tags.t	Tue Oct 20 15:59:10 2015 -0500
@@ -137,11 +137,11 @@
   $ hg identify
   b9154636be93 tip
   $ hg blackbox -l 5
-  1970/01/01 00:00:00 bob> identify
-  1970/01/01 00:00:00 bob> writing 48 bytes to cache/hgtagsfnodes1
-  1970/01/01 00:00:00 bob> 0/1 cache hits/lookups in * seconds (glob)
-  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 1 tags
-  1970/01/01 00:00:00 bob> identify exited 0 after ?.?? seconds (glob)
+  1970/01/01 00:00:00 bob (*)> identify (glob)
+  1970/01/01 00:00:00 bob (*)> writing 48 bytes to cache/hgtagsfnodes1 (glob)
+  1970/01/01 00:00:00 bob (*)> 0/1 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob)
+  1970/01/01 00:00:00 bob (*)> identify exited 0 after ?.?? seconds (glob)
 
 Failure to acquire lock results in no write
 
@@ -150,11 +150,11 @@
   $ hg identify
   b9154636be93 tip
   $ hg blackbox -l 5
-  1970/01/01 00:00:00 bob> identify
-  1970/01/01 00:00:00 bob> not writing .hg/cache/hgtagsfnodes1 because lock cannot be acquired
-  1970/01/01 00:00:00 bob> 0/1 cache hits/lookups in * seconds (glob)
-  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 1 tags
-  1970/01/01 00:00:00 bob> identify exited 0 after * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> identify (glob)
+  1970/01/01 00:00:00 bob (*)> not writing .hg/cache/hgtagsfnodes1 because lock cannot be acquired (glob)
+  1970/01/01 00:00:00 bob (*)> 0/1 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob)
+  1970/01/01 00:00:00 bob (*)> identify exited 0 after * seconds (glob)
 
   $ fnodescacheexists
   no fnodes cache
@@ -349,11 +349,11 @@
   bar                                1:78391a272241
 
   $ hg blackbox -l 5
-  1970/01/01 00:00:00 bob> tags
-  1970/01/01 00:00:00 bob> writing 24 bytes to cache/hgtagsfnodes1
-  1970/01/01 00:00:00 bob> 2/3 cache hits/lookups in * seconds (glob)
-  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 1 tags
-  1970/01/01 00:00:00 bob> tags exited 0 after * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> tags (glob)
+  1970/01/01 00:00:00 bob (*)> writing 24 bytes to cache/hgtagsfnodes1 (glob)
+  1970/01/01 00:00:00 bob (*)> 2/3 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob)
+  1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob)
 
 #if unix-permissions no-root
 Errors writing to .hgtags fnodes cache are silently ignored
@@ -369,11 +369,11 @@
   bar                                1:78391a272241
 
   $ hg blackbox -l 5
-  1970/01/01 00:00:00 bob> tags
-  1970/01/01 00:00:00 bob> couldn't write cache/hgtagsfnodes1: [Errno 13] Permission denied: '$TESTTMP/t2/.hg/cache/hgtagsfnodes1'
-  1970/01/01 00:00:00 bob> 2/3 cache hits/lookups in * seconds (glob)
-  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 1 tags
-  1970/01/01 00:00:00 bob> tags exited 0 after * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> tags (glob)
+  1970/01/01 00:00:00 bob (*)> couldn't write cache/hgtagsfnodes1: [Errno 13] Permission denied: '$TESTTMP/t2/.hg/cache/hgtagsfnodes1' (glob)
+  1970/01/01 00:00:00 bob (*)> 2/3 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob)
+  1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob)
 
   $ chmod a+w .hg/cache/hgtagsfnodes1
 
@@ -383,11 +383,11 @@
   bar                                1:78391a272241
 
   $ hg blackbox -l 5
-  1970/01/01 00:00:00 bob> tags
-  1970/01/01 00:00:00 bob> writing 24 bytes to cache/hgtagsfnodes1
-  1970/01/01 00:00:00 bob> 2/3 cache hits/lookups in * seconds (glob)
-  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 1 tags
-  1970/01/01 00:00:00 bob> tags exited 0 after * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> tags (glob)
+  1970/01/01 00:00:00 bob (*)> writing 24 bytes to cache/hgtagsfnodes1 (glob)
+  1970/01/01 00:00:00 bob (*)> 2/3 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob)
+  1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob)
 
   $ f --size .hg/cache/hgtagsfnodes1
   .hg/cache/hgtagsfnodes1: size=168
@@ -411,10 +411,10 @@
   bar                                1:78391a272241
 
   $ hg blackbox -l 4
-  1970/01/01 00:00:00 bob> writing 24 bytes to cache/hgtagsfnodes1
-  1970/01/01 00:00:00 bob> 2/3 cache hits/lookups in * seconds (glob)
-  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 1 tags
-  1970/01/01 00:00:00 bob> tags exited 0 after * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> writing 24 bytes to cache/hgtagsfnodes1 (glob)
+  1970/01/01 00:00:00 bob (*)> 2/3 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob)
+  1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob)
 
   $ f --size .hg/cache/hgtagsfnodes1
   .hg/cache/hgtagsfnodes1: size=120
@@ -427,11 +427,11 @@
   bar                                1:78391a272241
 
   $ hg blackbox -l 5
-  1970/01/01 00:00:00 bob> tags
-  1970/01/01 00:00:00 bob> writing 24 bytes to cache/hgtagsfnodes1
-  1970/01/01 00:00:00 bob> 2/3 cache hits/lookups in * seconds (glob)
-  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 1 tags
-  1970/01/01 00:00:00 bob> tags exited 0 after * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> tags (glob)
+  1970/01/01 00:00:00 bob (*)> writing 24 bytes to cache/hgtagsfnodes1 (glob)
+  1970/01/01 00:00:00 bob (*)> 2/3 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob)
+  1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob)
   $ f --size .hg/cache/hgtagsfnodes1
   .hg/cache/hgtagsfnodes1: size=144
 
--- a/tests/test-trusted.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-trusted.py	Tue Oct 20 15:59:10 2015 -0500
@@ -166,7 +166,7 @@
 print
 print "# error handling"
 
-def assertraises(f, exc=util.Abort):
+def assertraises(f, exc=error.Abort):
     try:
         f()
     except exc as inst:
--- a/tests/test-up-local-change.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-up-local-change.t	Tue Oct 20 15:59:10 2015 -0500
@@ -49,11 +49,14 @@
    preserving a for resolve of a
    b: remote created -> g
   getting b
-   a: versions differ -> m
+   a: versions differ -> m (premerge)
   picked tool 'true' for a (binary False symlink False)
   merging a
   my a@c19d34741b0a+ other a@1e71731e6fbb ancestor a@c19d34741b0a
-  launching merge tool: true *$TESTTMP/r2/a* * (glob)
+   a: versions differ -> m (merge)
+  picked tool 'true' for a (binary False symlink False)
+  my a@c19d34741b0a+ other a@1e71731e6fbb ancestor a@c19d34741b0a
+  launching merge tool: true *$TESTTMP/r2/a* * * (glob)
   merge tool returned: 0
   1 files updated, 1 files merged, 0 files removed, 0 files unresolved
   $ hg parents
@@ -70,11 +73,14 @@
    preserving a for resolve of a
    b: other deleted -> r
   removing b
-   a: versions differ -> m
+   a: versions differ -> m (premerge)
   picked tool 'true' for a (binary False symlink False)
   merging a
   my a@1e71731e6fbb+ other a@c19d34741b0a ancestor a@1e71731e6fbb
-  launching merge tool: true *$TESTTMP/r2/a* * (glob)
+   a: versions differ -> m (merge)
+  picked tool 'true' for a (binary False symlink False)
+  my a@1e71731e6fbb+ other a@c19d34741b0a ancestor a@1e71731e6fbb
+  launching merge tool: true *$TESTTMP/r2/a* * * (glob)
   merge tool returned: 0
   0 files updated, 1 files merged, 1 files removed, 0 files unresolved
   $ hg parents
@@ -99,11 +105,14 @@
    preserving a for resolve of a
    b: remote created -> g
   getting b
-   a: versions differ -> m
+   a: versions differ -> m (premerge)
   picked tool 'true' for a (binary False symlink False)
   merging a
   my a@c19d34741b0a+ other a@1e71731e6fbb ancestor a@c19d34741b0a
-  launching merge tool: true *$TESTTMP/r2/a* * (glob)
+   a: versions differ -> m (merge)
+  picked tool 'true' for a (binary False symlink False)
+  my a@c19d34741b0a+ other a@1e71731e6fbb ancestor a@c19d34741b0a
+  launching merge tool: true *$TESTTMP/r2/a* * * (glob)
   merge tool returned: 0
   1 files updated, 1 files merged, 0 files removed, 0 files unresolved
   $ hg parents
--- a/tests/test-update-branches.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-update-branches.t	Tue Oct 20 15:59:10 2015 -0500
@@ -260,3 +260,8 @@
   $ hg debugobsolete bd10386d478cd5a9faf2e604114c8e6da62d3889
   $ hg up
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Test experimental revset support
+
+  $ hg log -r '_destupdate()'
+  2:bd10386d478c 2 (no-eol)
--- a/tests/test-update-renames.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-update-renames.t	Tue Oct 20 15:59:10 2015 -0500
@@ -20,8 +20,7 @@
 
   $ hg up
   merging a and b to b
-  warning: conflicts during merge.
-  merging b incomplete! (edit conflicts, then use 'hg resolve --mark')
+  warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
   use 'hg resolve' to retry unresolved file merges
   [1]
--- a/tests/test-url.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-url.py	Tue Oct 20 15:59:10 2015 -0500
@@ -99,8 +99,8 @@
 
     >>> url('://192.0.2.16:80/')
     <url path: '://192.0.2.16:80/'>
-    >>> url('http://mercurial.selenic.com')
-    <url scheme: 'http', host: 'mercurial.selenic.com'>
+    >>> url('https://mercurial-scm.org')
+    <url scheme: 'https', host: 'mercurial-scm.org'>
     >>> url('/foo')
     <url path: '/foo'>
     >>> url('bundle:/foo')
@@ -174,7 +174,7 @@
 
     Non-localhost file URL:
 
-    >>> u = url('file://mercurial.selenic.com/foo')
+    >>> u = url('file://mercurial-scm.org/foo')
     Traceback (most recent call last):
       File "<stdin>", line 1, in ?
     Abort: file:// URLs can only refer to localhost
--- a/tests/test-win32text.t	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-win32text.t	Tue Oct 20 15:59:10 2015 -0500
@@ -370,7 +370,7 @@
 Trigger deprecation warning:
 
   $ hg id -t
-  win32text is deprecated: http://mercurial.selenic.com/wiki/Win32TextExtension
+  win32text is deprecated: https://mercurial-scm.org/wiki/Win32TextExtension
   tip
 
 Disable warning:
--- a/tests/test-wireproto.py	Thu Oct 08 23:24:38 2015 +0900
+++ b/tests/test-wireproto.py	Tue Oct 20 15:59:10 2015 -0500
@@ -12,6 +12,10 @@
 class clientpeer(wireproto.wirepeer):
     def __init__(self, serverrepo):
         self.serverrepo = serverrepo
+
+    def _capabilities(self):
+        return ['batch']
+
     def _call(self, cmd, **args):
         return wireproto.dispatch(self.serverrepo, proto(args), cmd)