--- a/.hgignore Wed Jun 06 13:28:49 2018 -0400
+++ b/.hgignore Wed Jun 06 13:31:24 2018 -0400
@@ -31,8 +31,8 @@
contrib/chg/chg
contrib/hgsh/hgsh
contrib/vagrant/.vagrant
-contrib/docker/debian-*
-contrib/docker/ubuntu-*
+contrib/packaging/docker/debian-*
+contrib/packaging/docker/ubuntu-*
dist
packages
doc/common.txt
--- a/Makefile Wed Jun 06 13:28:49 2018 -0400
+++ b/Makefile Wed Jun 06 13:31:24 2018 -0400
@@ -164,6 +164,37 @@
# Packaging targets
+packaging_targets := \
+ centos5 \
+ centos6 \
+ centos7 \
+ deb \
+ docker-centos5 \
+ docker-centos6 \
+ docker-centos7 \
+ docker-debian-jessie \
+ docker-debian-stretch \
+ docker-fedora20 \
+ docker-fedora21 \
+ docker-ubuntu-trusty \
+ docker-ubuntu-trusty-ppa \
+ docker-ubuntu-xenial \
+ docker-ubuntu-xenial-ppa \
+ docker-ubuntu-artful \
+ docker-ubuntu-artful-ppa \
+ docker-ubuntu-bionic \
+ docker-ubuntu-bionic-ppa \
+ fedora20 \
+ fedora21 \
+ linux-wheels \
+ linux-wheels-x86_64 \
+ linux-wheels-i686 \
+ ppa
+
+# Forward packaging targets for convenience.
+$(packaging_targets):
+ $(MAKE) -C contrib/packaging $@
+
osx:
rm -rf build/mercurial
/usr/bin/python2.7 setup.py install --optimize=1 \
@@ -197,127 +228,14 @@
--identifier org.mercurial-scm.mercurial \
--version "$${HGVER}" \
build/mercurial.pkg && \
- productbuild --distribution contrib/macosx/distribution.xml \
+ productbuild --distribution contrib/packaging/macosx/distribution.xml \
--package-path build/ \
--version "$${HGVER}" \
- --resources contrib/macosx/ \
+ --resources contrib/packaging/macosx/ \
"$${OUTPUTDIR:-dist/}"/Mercurial-"$${HGVER}"-macosx"$${OSXVER}".pkg
-deb:
- contrib/builddeb
-
-ppa:
- contrib/builddeb --source-only
-
-contrib/docker/debian-%: contrib/docker/debian.template
- sed "s/__CODENAME__/$*/" $< > $@
-
-docker-debian-jessie: contrib/docker/debian-jessie
- contrib/dockerdeb debian jessie
-
-docker-debian-stretch: contrib/docker/debian-stretch
- contrib/dockerdeb debian stretch
-
-contrib/docker/ubuntu-%: contrib/docker/ubuntu.template
- sed "s/__CODENAME__/$*/" $< > $@
-
-docker-ubuntu-trusty: contrib/docker/ubuntu-trusty
- contrib/dockerdeb ubuntu trusty
-
-docker-ubuntu-trusty-ppa: contrib/docker/ubuntu-trusty
- contrib/dockerdeb ubuntu trusty --source-only
-
-docker-ubuntu-xenial: contrib/docker/ubuntu-xenial
- contrib/dockerdeb ubuntu xenial
-
-docker-ubuntu-xenial-ppa: contrib/docker/ubuntu-xenial
- contrib/dockerdeb ubuntu xenial --source-only
-
-docker-ubuntu-artful: contrib/docker/ubuntu-artful
- contrib/dockerdeb ubuntu artful
-
-docker-ubuntu-artful-ppa: contrib/docker/ubuntu-artful
- contrib/dockerdeb ubuntu artful --source-only
-
-docker-ubuntu-bionic: contrib/docker/ubuntu-bionic
- contrib/dockerdeb ubuntu bionic
-
-docker-ubuntu-bionic-ppa: contrib/docker/ubuntu-bionic
- contrib/dockerdeb ubuntu bionic --source-only
-
-fedora20:
- mkdir -p packages/fedora20
- contrib/buildrpm
- cp rpmbuild/RPMS/*/* packages/fedora20
- cp rpmbuild/SRPMS/* packages/fedora20
- rm -rf rpmbuild
-
-docker-fedora20:
- mkdir -p packages/fedora20
- contrib/dockerrpm fedora20
-
-fedora21:
- mkdir -p packages/fedora21
- contrib/buildrpm
- cp rpmbuild/RPMS/*/* packages/fedora21
- cp rpmbuild/SRPMS/* packages/fedora21
- rm -rf rpmbuild
-
-docker-fedora21:
- mkdir -p packages/fedora21
- contrib/dockerrpm fedora21
-
-centos5:
- mkdir -p packages/centos5
- contrib/buildrpm --withpython
- cp rpmbuild/RPMS/*/* packages/centos5
- cp rpmbuild/SRPMS/* packages/centos5
-
-docker-centos5:
- mkdir -p packages/centos5
- contrib/dockerrpm centos5 --withpython
-
-centos6:
- mkdir -p packages/centos6
- contrib/buildrpm --withpython
- cp rpmbuild/RPMS/*/* packages/centos6
- cp rpmbuild/SRPMS/* packages/centos6
-
-docker-centos6:
- mkdir -p packages/centos6
- contrib/dockerrpm centos6 --withpython
-
-centos7:
- mkdir -p packages/centos7
- contrib/buildrpm
- cp rpmbuild/RPMS/*/* packages/centos7
- cp rpmbuild/SRPMS/* packages/centos7
-
-docker-centos7:
- mkdir -p packages/centos7
- contrib/dockerrpm centos7
-
-linux-wheels: linux-wheels-x86_64 linux-wheels-i686
-
-linux-wheels-x86_64:
- docker run -e "HGTEST_JOBS=$(shell nproc)" --rm -ti -v `pwd`:/src quay.io/pypa/manylinux1_x86_64 /src/contrib/build-linux-wheels.sh
-
-linux-wheels-i686:
- docker run -e "HGTEST_JOBS=$(shell nproc)" --rm -ti -v `pwd`:/src quay.io/pypa/manylinux1_i686 linux32 /src/contrib/build-linux-wheels.sh
-
.PHONY: help all local build doc cleanbutpackages clean install install-bin \
install-doc install-home install-home-bin install-home-doc \
dist dist-notests check tests check-code format-c update-pot \
- osx deb ppa \
- docker-debian-jessie \
- docker-debian-stretch \
- docker-ubuntu-trusty docker-ubuntu-trusty-ppa \
- docker-ubuntu-xenial docker-ubuntu-xenial-ppa \
- docker-ubuntu-artful docker-ubuntu-artful-ppa \
- docker-ubuntu-bionic docker-ubuntu-bionic-ppa \
- fedora20 docker-fedora20 \
- fedora21 docker-fedora21 \
- centos5 docker-centos5 \
- centos6 docker-centos6 \
- centos7 docker-centos7 \
- linux-wheels
+ $(packaging_targets) \
+ osx
--- a/contrib/build-linux-wheels.sh Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,34 +0,0 @@
-#!/bin/bash
-# This file is directly inspired by
-# https://github.com/pypa/python-manylinux-demo/blob/master/travis/build-wheels.sh
-set -e -x
-
-PYTHON_TARGETS=$(ls -d /opt/python/cp27*/bin)
-
-# Create an user for the tests
-useradd hgbuilder
-
-# Bypass uid/gid problems
-cp -R /src /io && chown -R hgbuilder:hgbuilder /io
-
-# Compile wheels for Python 2.X
-for PYBIN in $PYTHON_TARGETS; do
- "${PYBIN}/pip" wheel /io/ -w wheelhouse/
-done
-
-# Bundle external shared libraries into the wheels with
-# auditwheel (https://github.com/pypa/auditwheel) repair.
-# It also fix the ABI tag on the wheel making it pip installable.
-for whl in wheelhouse/*.whl; do
- auditwheel repair "$whl" -w /src/wheelhouse/
-done
-
-# Install packages and run the tests for all Python versions
-cd /io/tests/
-
-for PYBIN in $PYTHON_TARGETS; do
- # Install mercurial wheel as root
- "${PYBIN}/pip" install mercurial --no-index -f /src/wheelhouse
- # But run tests as hgbuilder user (non-root)
- su hgbuilder -c "\"${PYBIN}/python\" /io/tests/run-tests.py --with-hg=\"${PYBIN}/hg\" --blacklist=/io/contrib/linux-wheel-centos5-blacklist"
-done
--- a/contrib/builddeb Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,104 +0,0 @@
-#!/bin/sh -e
-#
-# Build a Mercurial debian package from the current repo
-#
-# Tested on Jessie (stable as of original script authoring.)
-
-. $(dirname $0)/packagelib.sh
-
-BUILD=1
-CLEANUP=1
-DISTID=`(lsb_release -is 2> /dev/null | tr '[:upper:]' '[:lower:]') || echo debian`
-CODENAME=`lsb_release -cs 2> /dev/null || echo unknown`
-DEBFLAGS=-b
-while [ "$1" ]; do
- case "$1" in
- --distid )
- shift
- DISTID="$1"
- shift
- ;;
- --codename )
- shift
- CODENAME="$1"
- shift
- ;;
- --cleanup )
- shift
- BUILD=
- ;;
- --build )
- shift
- CLEANUP=
- ;;
- --source-only )
- shift
- DEBFLAGS=-S
- ;;
- * )
- echo "Invalid parameter $1!" 1>&2
- exit 1
- ;;
- esac
-done
-
-trap "if [ '$CLEANUP' ] ; then rm -r '$PWD/debian' ; fi" EXIT
-
-set -u
-
-if [ ! -d .hg ]; then
- echo 'You are not inside a Mercurial repository!' 1>&2
- exit 1
-fi
-
-gethgversion
-debver="$version"
-if [ -n "$type" ] ; then
- debver="$debver~$type"
-fi
-if [ -n "$distance" ] ; then
- debver="$debver+$distance-$CODENAME-$node"
-elif [ "$DEBFLAGS" = "-S" ] ; then
- # for building a ppa (--source-only) for a release (distance == 0), we need
- # to version the distroseries so that we can upload to launchpad
- debver="$debver~${CODENAME}1"
-fi
-
-control=debian/control
-changelog=debian/changelog
-
-if [ "$BUILD" ]; then
- if [ -d debian ] ; then
- echo "Error! debian control directory already exists!"
- exit 1
- fi
-
- cp -r "$PWD"/contrib/debian debian
-
- sed -i.tmp "s/__VERSION__/$debver/" $changelog
- sed -i.tmp "s/__DATE__/$(date --rfc-2822)/" $changelog
- sed -i.tmp "s/__CODENAME__/$CODENAME/" $changelog
- rm $changelog.tmp
-
- # remove the node from the version string
- SRCFILE="mercurial_$(echo $debver | sed "s,-$node,,").orig.tar.gz"
- "$PWD/hg" archive $SRCFILE
- mv $SRCFILE ..
- debuild -us -uc -i -I $DEBFLAGS
- if [ $? != 0 ]; then
- echo 'debuild failed!'
- exit 1
- fi
-
-fi
-if [ "$CLEANUP" ] ; then
- echo
- OUTPUTDIR=${OUTPUTDIR:=packages/$DISTID-$CODENAME}
- mkdir -p "$OUTPUTDIR"
- find ../mercurial*.deb ../mercurial_*.build ../mercurial_*.changes \
- ../mercurial*.dsc ../mercurial*.gz \
- -type f -newer $control -print0 2>/dev/null | \
- xargs -Inarf -0 mv narf "$OUTPUTDIR"
- echo "Built packages for $debver:"
- find "$OUTPUTDIR" -type f -newer $control -name '*.deb'
-fi
--- a/contrib/buildrpm Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,162 +0,0 @@
-#!/bin/bash -e
-#
-# Build a Mercurial RPM from the current repo
-#
-# Tested on
-# - Fedora 20
-# - CentOS 5
-# - centOS 6
-
-. $(dirname $0)/packagelib.sh
-
-BUILD=1
-RPMBUILDDIR="$PWD/rpmbuild"
-
-while [ "$1" ]; do
- case "$1" in
- --prepare )
- shift
- BUILD=
- ;;
- --withpython | --with-python)
- shift
- PYTHONVER=2.7.14
- PYTHONMD5=cee2e4b33ad3750da77b2e85f2f8b724
- ;;
- --rpmbuilddir )
- shift
- RPMBUILDDIR="$1"
- shift
- ;;
- * )
- echo "Invalid parameter $1!" 1>&2
- exit 1
- ;;
- esac
-done
-
-cd "`dirname $0`/.."
-
-specfile=$PWD/contrib/mercurial.spec
-if [ ! -f $specfile ]; then
- echo "Cannot find $specfile!" 1>&2
- exit 1
-fi
-
-if [ ! -d .hg ]; then
- echo 'You are not inside a Mercurial repository!' 1>&2
- exit 1
-fi
-
-gethgversion
-
-# TODO: handle distance/node set, and type set
-
-if [ -z "$type" ] ; then
- release=1
-else
- release=0.9_$type
-fi
-
-if [ -n "$distance" ] ; then
- release=$release+$distance_$node
-fi
-
-if [ "$PYTHONVER" ]; then
- release=$release+$PYTHONVER
- RPMPYTHONVER=$PYTHONVER
-else
- RPMPYTHONVER=%{nil}
-fi
-
-mkdir -p $RPMBUILDDIR/{SOURCES,BUILD,SRPMS,RPMS}
-$HG archive -t tgz $RPMBUILDDIR/SOURCES/mercurial-$version-$release.tar.gz
-if [ "$PYTHONVER" ]; then
-(
- mkdir -p build
- cd build
- PYTHON_SRCFILE=Python-$PYTHONVER.tgz
- [ -f $PYTHON_SRCFILE ] || curl -Lo $PYTHON_SRCFILE http://www.python.org/ftp/python/$PYTHONVER/$PYTHON_SRCFILE
- if [ "$PYTHONMD5" ]; then
- echo "$PYTHONMD5 $PYTHON_SRCFILE" | md5sum -w -c
- fi
- ln -f $PYTHON_SRCFILE $RPMBUILDDIR/SOURCES/$PYTHON_SRCFILE
-
- DOCUTILSVER=`sed -ne "s/^%global docutilsname docutils-//p" $specfile`
- DOCUTILS_SRCFILE=docutils-$DOCUTILSVER.tar.gz
- [ -f $DOCUTILS_SRCFILE ] || curl -Lo $DOCUTILS_SRCFILE http://downloads.sourceforge.net/project/docutils/docutils/$DOCUTILSVER/$DOCUTILS_SRCFILE
- DOCUTILSMD5=`sed -ne "s/^%global docutilsmd5 //p" $specfile`
- if [ "$DOCUTILSMD5" ]; then
- echo "$DOCUTILSMD5 $DOCUTILS_SRCFILE" | md5sum -w -c
- fi
- ln -f $DOCUTILS_SRCFILE $RPMBUILDDIR/SOURCES/$DOCUTILS_SRCFILE
-)
-fi
-
-mkdir -p $RPMBUILDDIR/SPECS
-rpmspec=$RPMBUILDDIR/SPECS/mercurial.spec
-
-sed -e "s,^Version:.*,Version: $version," \
- -e "s,^Release:.*,Release: $release," \
- $specfile > $rpmspec
-
-echo >> $rpmspec
-echo "%changelog" >> $rpmspec
-
-if echo $version | grep '+' > /dev/null 2>&1; then
- latesttag="`echo $version | sed -e 's/+.*//'`"
- $HG log -r .:"$latesttag" -fM \
- --template '{date|hgdate}\t{author}\t{desc|firstline}\n' | python -c '
-import sys, time
-
-def datestr(date, format):
- return time.strftime(format, time.gmtime(float(date[0]) - date[1]))
-
-changelog = []
-for l in sys.stdin.readlines():
- tok = l.split("\t")
- hgdate = tuple(int(v) for v in tok[0].split())
- changelog.append((datestr(hgdate, "%F"), tok[1], hgdate, tok[2]))
-prevtitle = ""
-for l in sorted(changelog, reverse=True):
- title = "* %s %s" % (datestr(l[2], "%a %b %d %Y"), l[1])
- if prevtitle != title:
- prevtitle = title
- print
- print title
- print "- %s" % l[3].strip()
-' >> $rpmspec
-
-else
-
- $HG log \
- --template '{date|hgdate}\t{author}\t{desc|firstline}\n' \
- .hgtags | python -c '
-import sys, time
-
-def datestr(date, format):
- return time.strftime(format, time.gmtime(float(date[0]) - date[1]))
-
-for l in sys.stdin.readlines():
- tok = l.split("\t")
- hgdate = tuple(int(v) for v in tok[0].split())
- print "* %s %s\n- %s" % (datestr(hgdate, "%a %b %d %Y"), tok[1], tok[2])
-' >> $rpmspec
-
-fi
-
-sed -i \
- -e "s/^%define withpython.*$/%define withpython $RPMPYTHONVER/" \
- $rpmspec
-
-if [ "$BUILD" ]; then
- rpmbuild --define "_topdir $RPMBUILDDIR" -ba $rpmspec --clean
- if [ $? = 0 ]; then
- echo
- echo "Built packages for $version-$release:"
- find $RPMBUILDDIR/*RPMS/ -type f -newer $rpmspec
- fi
-else
- echo "Prepared sources for $version-$release $rpmspec are in $RPMBUILDDIR/SOURCES/ - use like:"
- echo "rpmbuild --define '_topdir $RPMBUILDDIR' -ba $rpmspec --clean"
-fi
--- a/contrib/check-code.py Wed Jun 06 13:28:49 2018 -0400
+++ b/contrib/check-code.py Wed Jun 06 13:31:24 2018 -0400
@@ -340,7 +340,8 @@
(r'\butil\.Abort\b', "directly use error.Abort"),
(r'^@(\w*\.)?cachefunc', "module-level @cachefunc is risky, please avoid"),
(r'^import atexit', "don't use atexit, use ui.atexit"),
- (r'^import Queue', "don't use Queue, use util.queue + util.empty"),
+ (r'^import Queue', "don't use Queue, use pycompat.queue.Queue + "
+ "pycompat.queue.Empty"),
(r'^import cStringIO', "don't use cStringIO.StringIO, use util.stringio"),
(r'^import urllib', "don't use urllib, use util.urlreq/util.urlerr"),
(r'^import SocketServer', "don't use SockerServer, use util.socketserver"),
--- a/contrib/debian/cacerts.rc Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,5 +0,0 @@
-# This config file points Mercurial at the system-wide certificate
-# store from the ca-certificates package.
-
-[web]
-cacerts = /etc/ssl/certs/ca-certificates.crt
--- a/contrib/debian/changelog Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,5 +0,0 @@
-mercurial (__VERSION__) __CODENAME__; urgency=medium
-
- * Automated build performed by upstream.
-
- -- Mercurial Devel <mercurial-devel@mercurial-scm.org> __DATE__
--- a/contrib/debian/compat Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-9
--- a/contrib/debian/control Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,54 +0,0 @@
-Source: mercurial
-Section: vcs
-Priority: optional
-Maintainer: Mercurial Developers <mercurial-devel@mercurial-scm.org>
-Build-Depends:
- debhelper (>= 9),
- dh-python,
- less,
- netbase,
- python-all,
- python-all-dev,
- python-docutils,
- unzip,
- zip
-Standards-Version: 3.9.4
-X-Python-Version: >= 2.7
-
-Package: mercurial
-Depends:
- python,
- ${shlibs:Depends},
- ${misc:Depends},
- ${python:Depends},
- mercurial-common (= ${source:Version})
-Architecture: any
-Description: fast, easy to use, distributed revision control tool.
- Mercurial is a fast, lightweight Source Control Management system designed
- for efficient handling of very large distributed projects.
- .
- Its features include:
- * O(1) delta-compressed file storage and retrieval scheme
- * Complete cross-indexing of files and changesets for efficient exploration
- of project history
- * Robust SHA1-based integrity checking and append-only storage model
- * Decentralized development model with arbitrary merging between trees
- * Easy-to-use command-line interface
- * Integrated stand-alone web interface
- * Small Python codebase
-
-Package: mercurial-common
-Architecture: all
-Depends:
- ${misc:Depends},
- ${python:Depends},
-Recommends: mercurial (= ${source:Version}), ca-certificates
-Suggests: wish
-Breaks: mercurial (<< ${source:Version})
-Replaces: mercurial (<< 2.6.3)
-Description: easy-to-use, scalable distributed version control system (common files)
- Mercurial is a fast, lightweight Source Control Management system designed
- for efficient handling of very large distributed projects.
- .
- This package contains the architecture independent components of Mercurial,
- and is generally useless without the mercurial package.
--- a/contrib/debian/copyright Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,27 +0,0 @@
-Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
-Upstream-Name: mercurial
-Source: https://www.mercurial-scm.org/
-
-Files: *
-Copyright: 2005-2018, Matt Mackall <mpm@selenic.com> and others.
-License: GPL-2+
- This program is free software; you can redistribute it
- and/or modify it under the terms of the GNU General Public
- License as published by the Free Software Foundation; either
- version 2 of the License, or (at your option) any later
- version.
- .
- This program is distributed in the hope that it will be
- useful, but WITHOUT ANY WARRANTY; without even the implied
- warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
- PURPOSE. See the GNU General Public License for more
- details.
- .
- You should have received a copy of the GNU General Public
- License along with this package; if not, write to the Free
- Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- Boston, MA 02110-1301 USA
- .
- On Debian systems, the full text of the GNU General Public
- License version 2 can be found in the file
- `/usr/share/common-licenses/GPL-2'.
--- a/contrib/debian/default-tools.rc Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,5 +0,0 @@
-[ui]
-editor = sensible-editor
-
-[pager]
-pager = sensible-pager
--- a/contrib/debian/hgkpath.rc Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,2 +0,0 @@
-[hgk]
-path = /usr/share/mercurial/hgk
--- a/contrib/debian/rules Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,44 +0,0 @@
-#!/usr/bin/make -f
-# Uncomment this to turn on verbose mode.
-# export DH_VERBOSE=1
-
-CPUS=$(shell cat /proc/cpuinfo | grep -E ^processor | wc -l)
-
-%:
- dh $@ --with python2
-
-override_dh_auto_test:
- http_proxy='' dh_auto_test -- TESTFLAGS="-j$(CPUS)"
-
-override_dh_python2:
- dh_python2
- find debian/mercurial/usr/share -type d -empty -delete
-
-override_dh_install:
- python$(PYVERS) setup.py install --root "$(CURDIR)"/debian/mercurial --install-layout=deb
- # chg
- make -C contrib/chg \
- DESTDIR="$(CURDIR)"/debian/mercurial \
- PREFIX=/usr \
- clean install
- # remove arch-independent python stuff
- find "$(CURDIR)"/debian/mercurial/usr/lib \
- ! -name '*.so' ! -type d -delete , \
- -type d -empty -delete
- python$(PYVERS) setup.py install --root "$(CURDIR)/debian/mercurial-common" --install-layout=deb
- make install-doc PREFIX="$(CURDIR)"/debian/mercurial-common/usr
- # remove arch-dependent python stuff
- find "$(CURDIR)"/debian/mercurial-common/usr/lib \
- -name '*.so' ! -type d -delete , \
- -type d -empty -delete
- cp contrib/hg-ssh "$(CURDIR)"/debian/mercurial-common/usr/bin
- mkdir -p "$(CURDIR)"/debian/mercurial-common/usr/share/mercurial
- cp contrib/hgk "$(CURDIR)"/debian/mercurial-common/usr/share/mercurial
- mkdir -p "$(CURDIR)"/debian/mercurial-common/etc/mercurial/hgrc.d/
- cp contrib/debian/*.rc "$(CURDIR)"/debian/mercurial-common/etc/mercurial/hgrc.d/
- # completions
- mkdir -p "$(CURDIR)"/debian/mercurial-common/usr/share/bash-completion/completions
- cp contrib/bash_completion "$(CURDIR)"/debian/mercurial-common/usr/share/bash-completion/completions/hg
- mkdir -p "$(CURDIR)"/debian/mercurial-common/usr/share/zsh/vendor-completions
- cp contrib/zsh_completion "$(CURDIR)"/debian/mercurial-common/usr/share/zsh/vendor-completions/_hg
- rm "$(CURDIR)"/debian/mercurial-common/usr/bin/hg
--- a/contrib/docker/centos5 Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,23 +0,0 @@
-FROM centos:centos5
-RUN \
- sed -i 's/^mirrorlist/#mirrorlist/' /etc/yum.repos.d/*.repo && \
- sed -i 's/^#\(baseurl=\)http:\/\/mirror.centos.org\/centos/\1http:\/\/vault.centos.org/' /etc/yum.repos.d/*.repo && \
- sed -i 's/\$releasever/5.11/' /etc/yum.repos.d/*.repo
-
-RUN yum install -y \
- gcc \
- gettext \
- make \
- python-devel \
- python-docutils \
- rpm-build \
- tar
-
-# For creating repo meta data
-RUN yum install -y \
- bzip2-devel \
- createrepo \
- ncurses-devel \
- openssl-devel \
- readline-devel \
- zlib-devel
--- a/contrib/docker/centos6 Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,20 +0,0 @@
-FROM centos:centos6
-RUN yum install -y \
- gcc \
- gettext \
- make \
- python-devel \
- python-docutils \
- rpm-build \
- tar
-
-# For creating repo meta data
-RUN yum install -y createrepo
-
-# For python
-RUN yum install -y \
- bzip2-devel \
- ncurses-devel \
- openssl-devel \
- readline-devel \
- zlib-devel
--- a/contrib/docker/centos7 Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,12 +0,0 @@
-FROM centos:centos7
-RUN yum install -y \
- gcc \
- gettext \
- make \
- python-devel \
- python-docutils \
- rpm-build \
- tar
-
-# For creating repo meta data
-RUN yum install -y createrepo
--- a/contrib/docker/debian.template Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,12 +0,0 @@
-FROM debian:__CODENAME__
-RUN apt-get update && apt-get install -y \
- build-essential \
- debhelper \
- devscripts \
- dh-python \
- less \
- python \
- python-all-dev \
- python-docutils \
- unzip \
- zip
--- a/contrib/docker/fedora20 Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,11 +0,0 @@
-FROM fedora:20
-RUN yum install -y \
- gcc \
- gettext \
- make \
- python-devel \
- python-docutils \
- rpm-build
-
-# For creating repo meta data
-RUN yum install -y createrepo
--- a/contrib/docker/fedora21 Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,11 +0,0 @@
-FROM fedora:21
-RUN yum install -y \
- gcc \
- gettext \
- make \
- python-devel \
- python-docutils \
- rpm-build
-
-# For creating repo meta data
-RUN yum install -y createrepo
--- a/contrib/docker/ubuntu.template Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,12 +0,0 @@
-FROM ubuntu:__CODENAME__
-RUN apt-get update && apt-get install -y \
- build-essential \
- debhelper \
- devscripts \
- dh-python \
- less \
- python \
- python-all-dev \
- python-docutils \
- unzip \
- zip
--- a/contrib/dockerdeb Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,35 +0,0 @@
-#!/bin/bash -eu
-
-. $(dirname $0)/dockerlib.sh
-. $(dirname $0)/packagelib.sh
-
-BUILDDIR=$(dirname $0)
-export ROOTDIR=$(cd $BUILDDIR/.. > /dev/null; pwd)
-
-checkdocker
-
-DISTID="$1"
-CODENAME="$2"
-PLATFORM="$1-$2"
-shift; shift # extra params are passed to build process
-
-OUTPUTDIR=${OUTPUTDIR:=$ROOTDIR/packages/$PLATFORM}
-
-initcontainer $PLATFORM
-
-# debuild only appears to be able to save built debs etc to .., so we
-# have to share the .. of the current directory with the docker
-# container and hope it's writable. Whee.
-dn=$(basename $PWD)
-
-if [ $(uname) = "Darwin" ] ; then
- $DOCKER run -u $DBUILDUSER --rm -v $PWD/..:/mnt $CONTAINER \
- sh -c "cd /mnt/$dn && make clean && make local"
-fi
-$DOCKER run -u $DBUILDUSER --rm -v $PWD/..:/mnt $CONTAINER \
- sh -c "cd /mnt/$dn && DEB_BUILD_OPTIONS='${DEB_BUILD_OPTIONS:=}' contrib/builddeb --build --distid $DISTID --codename $CODENAME $@"
-contrib/builddeb --cleanup --distid $DISTID --codename $CODENAME
-if [ $(uname) = "Darwin" ] ; then
- $DOCKER run -u $DBUILDUSER --rm -v $PWD/..:/mnt $CONTAINER \
- sh -c "cd /mnt/$dn && make clean"
-fi
--- a/contrib/dockerlib.sh Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,42 +0,0 @@
-#!/bin/sh -eu
-
-# This function exists to set up the DOCKER variable and verify that
-# it's the binary we expect. It also verifies that the docker service
-# is running on the system and we can talk to it.
-function checkdocker() {
- if which docker.io >> /dev/null 2>&1 ; then
- DOCKER=docker.io
- elif which docker >> /dev/null 2>&1 ; then
- DOCKER=docker
- else
- echo "Error: docker must be installed"
- exit 1
- fi
-
- $DOCKER -h 2> /dev/null | grep -q Jansens && { echo "Error: $DOCKER is the Docking System Tray - install docker.io instead"; exit 1; }
- $DOCKER version | grep -Eq "^Client( version)?:" || { echo "Error: unexpected output from \"$DOCKER version\""; exit 1; }
- $DOCKER version | grep -Eq "^Server( version)?:" || { echo "Error: could not get docker server version - check it is running and your permissions"; exit 1; }
-}
-
-# Construct a container and leave its name in $CONTAINER for future use.
-function initcontainer() {
- [ "$1" ] || { echo "Error: platform name must be specified"; exit 1; }
-
- DFILE="$ROOTDIR/contrib/docker/$1"
- [ -f "$DFILE" ] || { echo "Error: docker file $DFILE not found"; exit 1; }
-
- CONTAINER="hg-dockerrpm-$1"
- DBUILDUSER=build
- (
- cat $DFILE
- if [ $(uname) = "Darwin" ] ; then
- # The builder is using boot2docker on OS X, so we're going to
- # *guess* the uid of the user inside the VM that is actually
- # running docker. This is *very likely* to fail at some point.
- echo RUN useradd $DBUILDUSER -u 1000
- else
- echo RUN groupadd $DBUILDUSER -g `id -g` -o
- echo RUN useradd $DBUILDUSER -u `id -u` -g $DBUILDUSER -o
- fi
- ) | $DOCKER build --build-arg http_proxy --build-arg https_proxy --tag $CONTAINER -
-}
--- a/contrib/dockerrpm Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,37 +0,0 @@
-#!/bin/bash -e
-
-. $(dirname $0)/dockerlib.sh
-
-BUILDDIR=$(dirname $0)
-export ROOTDIR=$(cd $BUILDDIR/..; pwd)
-
-checkdocker
-
-PLATFORM="$1"
-shift # extra params are passed to buildrpm
-
-initcontainer $PLATFORM
-
-RPMBUILDDIR=$ROOTDIR/packages/$PLATFORM
-contrib/buildrpm --rpmbuilddir $RPMBUILDDIR --prepare $*
-
-DSHARED=/mnt/shared
-$DOCKER run -e http_proxy -e https_proxy -u $DBUILDUSER --rm -v $RPMBUILDDIR:$DSHARED $CONTAINER \
- rpmbuild --define "_topdir $DSHARED" -ba $DSHARED/SPECS/mercurial.spec --clean
-
-$DOCKER run -e http_proxy -e https_proxy -u $DBUILDUSER --rm -v $RPMBUILDDIR:$DSHARED $CONTAINER \
- createrepo $DSHARED
-
-cat << EOF > $RPMBUILDDIR/mercurial.repo
-# Place this file in /etc/yum.repos.d/mercurial.repo
-[mercurial]
-name=Mercurial packages for $PLATFORM
-# baseurl=file://$RPMBUILDDIR/
-baseurl=http://hg.example.com/build/$PLATFORM/
-skip_if_unavailable=True
-gpgcheck=0
-enabled=1
-EOF
-
-echo
-echo "Build complete - results can be found in $RPMBUILDDIR"
--- a/contrib/fixpax.py Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,64 +0,0 @@
-#!/usr/bin/env python
-# fixpax - fix ownership in bdist_mpkg output
-#
-# Copyright 2015 Matt Mackall <mpm@selenic.com>
-#
-# This software may be used and distributed according to the terms of the
-# MIT license (http://opensource.org/licenses/MIT)
-
-"""Set file ownership to 0 in an Archive.pax.gz.
-Suitable for fixing files bdist_mpkg output:
-*.mpkg/Contents/Packages/*.pkg/Contents/Archive.pax.gz
-"""
-
-from __future__ import absolute_import, print_function
-import gzip
-import os
-import sys
-
-def fixpax(iname, oname):
- i = gzip.GzipFile(iname)
- o = gzip.GzipFile(oname, "w")
-
- while True:
- magic = i.read(6)
- dev = i.read(6)
- ino = i.read(6)
- mode = i.read(6)
- i.read(6) # uid
- i.read(6) # gid
- nlink = i.read(6)
- rdev = i.read(6)
- mtime = i.read(11)
- namesize = i.read(6)
- filesize = i.read(11)
- name = i.read(int(namesize, 8))
- data = i.read(int(filesize, 8))
-
- o.write(magic)
- o.write(dev)
- o.write(ino)
- o.write(mode)
- o.write("000000")
- o.write("000000")
- o.write(nlink)
- o.write(rdev)
- o.write(mtime)
- o.write(namesize)
- o.write(filesize)
- o.write(name)
- o.write(data)
-
- if name.startswith("TRAILER!!!"):
- o.write(i.read())
- break
-
- o.close()
- i.close()
-
-if __name__ == '__main__':
- for iname in sys.argv[1:]:
- print('fixing file ownership in %s' % iname)
- oname = sys.argv[1] + '.tmp'
- fixpax(iname, oname)
- os.rename(oname, iname)
--- a/contrib/fuzz/Makefile Wed Jun 06 13:28:49 2018 -0400
+++ b/contrib/fuzz/Makefile Wed Jun 06 13:31:24 2018 -0400
@@ -1,40 +1,60 @@
+CC = clang
+CXX = clang++
+
+all: bdiff xdiff
+
+fuzzutil.o: fuzzutil.cc fuzzutil.h
+ $(CXX) $(CXXFLAGS) -g -O1 -fsanitize=fuzzer-no-link,address \
+ -std=c++17 \
+ -I../../mercurial -c -o fuzzutil.o fuzzutil.cc
+
+fuzzutil-oss-fuzz.o: fuzzutil.cc fuzzutil.h
+ $(CXX) $(CXXFLAGS) -std=c++17 \
+ -I../../mercurial -c -o fuzzutil-oss-fuzz.o fuzzutil.cc
+
bdiff.o: ../../mercurial/bdiff.c
- clang -g -O1 -fsanitize=fuzzer-no-link,address -c -o bdiff.o \
+ $(CC) $(CFLAGS) -fsanitize=fuzzer-no-link,address -c -o bdiff.o \
../../mercurial/bdiff.c
-bdiff: bdiff.cc bdiff.o
- clang -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
- -I../../mercurial bdiff.cc bdiff.o -o bdiff
+bdiff: bdiff.cc bdiff.o fuzzutil.o
+ $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
+ -std=c++17 \
+ -I../../mercurial bdiff.cc bdiff.o fuzzutil.o -o bdiff
bdiff-oss-fuzz.o: ../../mercurial/bdiff.c
- $$CC $$CFLAGS -c -o bdiff-oss-fuzz.o ../../mercurial/bdiff.c
+ $(CC) $(CFLAGS) -c -o bdiff-oss-fuzz.o ../../mercurial/bdiff.c
-bdiff_fuzzer: bdiff.cc bdiff-oss-fuzz.o
- $$CXX $$CXXFLAGS -std=c++11 -I../../mercurial bdiff.cc \
- bdiff-oss-fuzz.o -lFuzzingEngine -o $$OUT/bdiff_fuzzer
+bdiff_fuzzer: bdiff.cc bdiff-oss-fuzz.o fuzzutil-oss-fuzz.o
+ $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial bdiff.cc \
+ bdiff-oss-fuzz.o fuzzutil-oss-fuzz.o -lFuzzingEngine -o \
+ $$OUT/bdiff_fuzzer
x%.o: ../../mercurial/thirdparty/xdiff/x%.c ../../mercurial/thirdparty/xdiff/*.h
- clang -g -O1 -fsanitize=fuzzer-no-link,address -c \
+ $(CC) -g -O1 -fsanitize=fuzzer-no-link,address -c \
-o $@ \
$<
-xdiff: xdiff.cc xdiffi.o xprepare.o xutils.o
- clang -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
+xdiff: CXXFLAGS += -std=c++17
+xdiff: xdiff.cc xdiffi.o xprepare.o xutils.o fuzzutil.o
+ $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
-I../../mercurial xdiff.cc \
- xdiffi.o xprepare.o xutils.o -o xdiff
+ xdiffi.o xprepare.o xutils.o fuzzutil.o -o xdiff
fuzz-x%.o: ../../mercurial/thirdparty/xdiff/x%.c ../../mercurial/thirdparty/xdiff/*.h
- $$CC $$CFLAGS -c \
+ $(CC) $(CFLAGS) -c \
-o $@ \
$<
-xdiff_fuzzer: xdiff.cc fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o
- $$CXX $$CXXFLAGS -std=c++11 -I../../mercurial xdiff.cc \
- fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o \
+xdiff_fuzzer: xdiff.cc fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o fuzzutil-oss-fuzz.o
+ $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial xdiff.cc \
+ fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o fuzzutil-oss-fuzz.o \
-lFuzzingEngine -o $$OUT/xdiff_fuzzer
-all: bdiff xdiff
+clean:
+ $(RM) *.o *_fuzzer \
+ bdiff \
+ xdiff
oss-fuzz: bdiff_fuzzer xdiff_fuzzer
-.PHONY: all oss-fuzz
+.PHONY: all clean oss-fuzz
--- a/contrib/fuzz/bdiff.cc Wed Jun 06 13:28:49 2018 -0400
+++ b/contrib/fuzz/bdiff.cc Wed Jun 06 13:31:24 2018 -0400
@@ -6,30 +6,25 @@
* This software may be used and distributed according to the terms of
* the GNU General Public License, incorporated herein by reference.
*/
+#include <memory>
#include <stdlib.h>
+#include "fuzzutil.h"
+
extern "C" {
#include "bdiff.h"
int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
{
- if (!Size) {
+ auto maybe_inputs = SplitInputs(Data, Size);
+ if (!maybe_inputs) {
return 0;
}
- // figure out a random point in [0, Size] to split our input.
- size_t split = Data[0] / 255.0 * Size;
-
- // left input to diff is data[1:split]
- const uint8_t *left = Data + 1;
- // which has len split-1
- size_t left_size = split - 1;
- // right starts at the next byte after left ends
- const uint8_t *right = left + left_size;
- size_t right_size = Size - split;
+ auto inputs = std::move(maybe_inputs.value());
struct bdiff_line *a, *b;
- int an = bdiff_splitlines((const char *)left, split - 1, &a);
- int bn = bdiff_splitlines((const char *)right, right_size, &b);
+ int an = bdiff_splitlines(inputs.left.get(), inputs.left_size, &a);
+ int bn = bdiff_splitlines(inputs.right.get(), inputs.right_size, &b);
struct bdiff_hunk l;
bdiff_diff(a, an, b, bn, &l);
free(a);
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/fuzz/fuzzutil.cc Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,27 @@
+#include "fuzzutil.h"
+
+#include <cstring>
+#include <utility>
+
+contrib::optional<two_inputs> SplitInputs(const uint8_t *Data, size_t Size)
+{
+ if (!Size) {
+ return contrib::nullopt;
+ }
+ // figure out a random point in [0, Size] to split our input.
+ size_t left_size = (Data[0] / 255.0) * (Size - 1);
+
+ // Copy inputs to new allocations so if bdiff over-reads
+ // AddressSanitizer can detect it.
+ std::unique_ptr<char[]> left(new char[left_size]);
+ std::memcpy(left.get(), Data + 1, left_size);
+ // right starts at the next byte after left ends
+ size_t right_size = Size - (left_size + 1);
+ std::unique_ptr<char[]> right(new char[right_size]);
+ std::memcpy(right.get(), Data + 1 + left_size, right_size);
+ LOG(2) << "inputs are " << left_size << " and " << right_size
+ << " bytes" << std::endl;
+ two_inputs result = {std::move(right), right_size, std::move(left),
+ left_size};
+ return result;
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/fuzz/fuzzutil.h Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,47 @@
+#ifndef CONTRIB_FUZZ_FUZZUTIL_H
+#define CONTRIB_FUZZ_FUZZUTIL_H
+#include <iostream>
+#include <memory>
+#include <stdint.h>
+
+/* Try and use std::optional, but failing that assume we'll have a
+ * workable https://abseil.io/ install on the include path to get
+ * their backport of std::optional. */
+#ifdef __has_include
+#if __has_include(<optional>) && __cplusplus >= 201703L
+#include <optional>
+#define CONTRIB_FUZZ_HAVE_STD_OPTIONAL
+#endif
+#endif
+#ifdef CONTRIB_FUZZ_HAVE_STD_OPTIONAL
+namespace contrib
+{
+using std::nullopt;
+using std::optional;
+} /* namespace contrib */
+#else
+#include "third_party/absl/types/optional.h"
+namespace contrib
+{
+using absl::nullopt;
+using absl::optional;
+} /* namespace contrib */
+#endif
+
+/* set DEBUG to 1 for a few debugging prints, or 2 for a lot */
+#define DEBUG 0
+#define LOG(level) \
+ if (level <= DEBUG) \
+ std::cout
+
+struct two_inputs {
+ std::unique_ptr<char[]> right;
+ size_t right_size;
+ std::unique_ptr<char[]> left;
+ size_t left_size;
+};
+
+/* Split a non-zero-length input into two inputs. */
+contrib::optional<two_inputs> SplitInputs(const uint8_t *Data, size_t Size);
+
+#endif /* CONTRIB_FUZZ_FUZZUTIL_H */
--- a/contrib/fuzz/xdiff.cc Wed Jun 06 13:28:49 2018 -0400
+++ b/contrib/fuzz/xdiff.cc Wed Jun 06 13:31:24 2018 -0400
@@ -10,6 +10,8 @@
#include <inttypes.h>
#include <stdlib.h>
+#include "fuzzutil.h"
+
extern "C" {
int hunk_consumer(long a1, long a2, long b1, long b2, void *priv)
@@ -20,21 +22,17 @@
int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
{
- if (!Size) {
+ auto maybe_inputs = SplitInputs(Data, Size);
+ if (!maybe_inputs) {
return 0;
}
- // figure out a random point in [0, Size] to split our input.
- size_t split = Data[0] / 255.0 * Size;
-
+ auto inputs = std::move(maybe_inputs.value());
mmfile_t a, b;
- // `a` input to diff is data[1:split]
- a.ptr = (char *)Data + 1;
- // which has len split-1
- a.size = split - 1;
- // `b` starts at the next byte after `a` ends
- b.ptr = a.ptr + a.size;
- b.size = Size - split;
+ a.ptr = inputs.left.get();
+ a.size = inputs.left_size;
+ b.ptr = inputs.right.get();
+ b.size = inputs.right_size;
xpparam_t xpp = {
XDF_INDENT_HEURISTIC, /* flags */
};
--- a/contrib/hg-ssh Wed Jun 06 13:28:49 2018 -0400
+++ b/contrib/hg-ssh Wed Jun 06 13:31:24 2018 -0400
@@ -39,10 +39,14 @@
from mercurial import (
dispatch,
+ pycompat,
ui as uimod,
)
def main():
+ # Prevent insertion/deletion of CRs
+ dispatch.initstdio()
+
cwd = os.getcwd()
readonly = False
args = sys.argv[1:]
@@ -66,15 +70,15 @@
path = cmdargv[2]
repo = os.path.normpath(os.path.join(cwd, os.path.expanduser(path)))
if repo in allowed_paths:
- cmd = ['-R', repo, 'serve', '--stdio']
+ cmd = [b'-R', pycompat.fsencode(repo), b'serve', b'--stdio']
req = dispatch.request(cmd)
if readonly:
if not req.ui:
req.ui = uimod.ui.load()
- req.ui.setconfig('hooks', 'pretxnopen.hg-ssh',
- 'python:__main__.rejectpush', 'hg-ssh')
- req.ui.setconfig('hooks', 'prepushkey.hg-ssh',
- 'python:__main__.rejectpush', 'hg-ssh')
+ req.ui.setconfig(b'hooks', b'pretxnopen.hg-ssh',
+ b'python:__main__.rejectpush', b'hg-ssh')
+ req.ui.setconfig(b'hooks', b'prepushkey.hg-ssh',
+ b'python:__main__.rejectpush', b'hg-ssh')
dispatch.dispatch(req)
else:
sys.stderr.write('Illegal repository "%s"\n' % repo)
@@ -84,7 +88,7 @@
sys.exit(255)
def rejectpush(ui, **kwargs):
- ui.warn(("Permission denied\n"))
+ ui.warn((b"Permission denied\n"))
# mercurial hooks use unix process conventions for hook return values
# so a truthy return means failure
return True
--- a/contrib/linux-wheel-centos5-blacklist Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,3 +0,0 @@
-test-convert-git.t
-test-subrepo-git.t
-test-patchbomb-tls.t
--- a/contrib/macosx/Readme.html Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,37 +0,0 @@
-<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
-<!-- This is the second screen displayed during the install. -->
-<html>
-<head>
- <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
- <meta http-equiv="Content-Style-Type" content="text/css">
- <title>Read Me - Important Information</title>
- <style type="text/css">
- p.p1 {margin: 0.0px 0.0px 0.0px 0.0px; font: 14.0px Helvetica}
- p.p2 {margin: 0.0px 0.0px 0.0px 0.0px; font: 12.0px Helvetica; min-height: 14.0px}
- p.p3 {margin: 0.0px 0.0px 0.0px 0.0px; font: 12.0px Helvetica}
- p.p4 {margin: 0.0px 0.0px 0.0px 0.0px; font: 12.0px Helvetica; color: #000fed}
- span.s1 {text-decoration: underline}
- span.s2 {font: 12.0px Courier}
- </style>
-</head>
-<body>
-<p class="p1"><b>Before you install</b></p>
-<p class="p2"><br></p>
-<p class="p3">This is an OS X version of Mercurial that depends on the default Python installation.</p>
-<p class="p2"><br></p>
-<p class="p1"><b>After you install</b></p>
-<p class="p2"><br></p>
-<p class="p3">This package installs the <span class="s2">hg</span> executable as <span class="s2">/usr/local/bin/hg</span>. See <span class="s2">hg debuginstall</span> for more info on file locations.</p>
-<p class="p2"><br></p>
-<p class="p1"><b>Documentation</b></p>
-<p class="p2"><br></p>
-<p class="p3">Visit the <a href="https://mercurial-scm.org/">Mercurial web site and wiki</a></p>
-<p class="p2"><br></p>
-<p class="p3">There's also a free book, <a href="https://book.mercurial-scm.org/">Distributed revision control with Mercurial</a></p>
-<p class="p2"><br></p>
-<p class="p1"><b>Reporting problems</b></p>
-<p class="p2"><br></p>
-<p class="p3">If you run into any problems, please file a bug online:</p>
-<p class="p3"><a href="https://bz.mercurial-scm.org/">https://bz.mercurial-scm.org/</a></p>
-</body>
-</html>
--- a/contrib/macosx/Welcome.html Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,20 +0,0 @@
-<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
-<!-- This is the first screen displayed during the install. -->
-<html>
-<head>
- <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
- <meta http-equiv="Content-Style-Type" content="text/css">
- <title></title>
- <style type="text/css">
- p.p1 {margin: 0.0px 0.0px 0.0px 0.0px; font: 14.0px Helvetica}
- p.p2 {margin: 0.0px 0.0px 0.0px 0.0px; font: 12.0px Helvetica; min-height: 14.0px}
- </style>
-</head>
-<body>
-<p class="p1">This is a prepackaged release of <a href="https://mercurial-scm.org/">Mercurial</a> for Mac OS X.</p>
-<p class="p2"><br></p>
-<br>
-<p>
-Please be sure to read the latest <a href="https://mercurial-scm.org/wiki/WhatsNew">release notes</a>.</p>
-</body>
-</html>
--- a/contrib/macosx/distribution.xml Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,19 +0,0 @@
-<?xml version="1.0" encoding="utf-8" standalone="no"?>
-<installer-gui-script minSpecVersion="1">
- <title>Mercurial SCM</title>
- <organization>org.mercurial-scm</organization>
- <options customize="never" require-scripts="false" rootVolumeOnly="true" />
- <welcome file="Welcome.html" mime-type="text/html" />
- <license file="../../COPYING" mime-type="text/plain" />
- <readme file="Readme.html" mime-type="text/html" />
- <pkg-ref id="org.mercurial-scm.mercurial"
- version="0"
- auth="root"
- onConclusion="none">mercurial.pkg</pkg-ref>
- <choices-outline>
- <line choice="org.mercurial-scm.mercurial"/>
- </choices-outline>
- <choice id="org.mercurial-scm.mercurial" visible="false">
- <pkg-ref id="org.mercurial-scm.mercurial"/>
- </choice>
-</installer-gui-script>
--- a/contrib/mercurial.spec Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,164 +0,0 @@
-%global emacs_lispdir %{_datadir}/emacs/site-lisp
-
-%define withpython %{nil}
-
-%if "%{?withpython}"
-
-%global pythonver %{withpython}
-%global pythonname Python-%{withpython}
-%global docutilsname docutils-0.14
-%global docutilsmd5 c53768d63db3873b7d452833553469de
-%global pythonhg python-hg
-%global hgpyprefix /opt/%{pythonhg}
-# byte compilation will fail on some some Python /test/ files
-%global _python_bytecompile_errors_terminate_build 0
-
-%else
-
-%global pythonver %(python -c 'import sys;print ".".join(map(str, sys.version_info[:2]))')
-
-%endif
-
-Summary: A fast, lightweight Source Control Management system
-Name: mercurial
-Version: snapshot
-Release: 0
-License: GPLv2+
-Group: Development/Tools
-URL: https://mercurial-scm.org/
-Source0: %{name}-%{version}-%{release}.tar.gz
-%if "%{?withpython}"
-Source1: %{pythonname}.tgz
-Source2: %{docutilsname}.tar.gz
-%endif
-BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root
-
-BuildRequires: make, gcc, gettext
-%if "%{?withpython}"
-BuildRequires: readline-devel, openssl-devel, ncurses-devel, zlib-devel, bzip2-devel
-%else
-BuildRequires: python >= 2.7, python-devel, python-docutils >= 0.5
-Requires: python >= 2.7
-%endif
-# The hgk extension uses the wish tcl interpreter, but we don't enforce it
-#Requires: tk
-
-%description
-Mercurial is a fast, lightweight source control management system designed
-for efficient handling of very large distributed projects.
-
-%prep
-
-%if "%{?withpython}"
-%setup -q -n mercurial-%{version}-%{release} -a1 -a2
-# despite the comments in cgi.py, we do this to prevent rpmdeps from picking /usr/local/bin/python up
-sed -i '1c#! /usr/bin/env python' %{pythonname}/Lib/cgi.py
-%else
-%setup -q -n mercurial-%{version}-%{release}
-%endif
-
-%build
-
-%if "%{?withpython}"
-
-PYPATH=$PWD/%{pythonname}
-cd $PYPATH
-./configure --prefix=%{hgpyprefix}
-make all %{?_smp_mflags}
-cd -
-
-cd %{docutilsname}
-LD_LIBRARY_PATH=$PYPATH $PYPATH/python setup.py build
-cd -
-
-# verify Python environment
-LD_LIBRARY_PATH=$PYPATH PYTHONPATH=$PWD/%{docutilsname} $PYPATH/python -c 'import sys, zlib, bz2, ssl, curses, readline'
-
-# set environment for make
-export PATH=$PYPATH:$PATH
-export LD_LIBRARY_PATH=$PYPATH
-export CFLAGS="-L $PYPATH"
-export PYTHONPATH=$PWD/%{docutilsname}
-
-%endif
-
-make all
-make -C contrib/chg
-
-%install
-rm -rf $RPM_BUILD_ROOT
-
-%if "%{?withpython}"
-
-PYPATH=$PWD/%{pythonname}
-cd $PYPATH
-make install DESTDIR=$RPM_BUILD_ROOT
-# these .a are not necessary and they are readonly and strip fails - kill them!
-rm -f %{buildroot}%{hgpyprefix}/lib/{,python2.*/config}/libpython2.*.a
-cd -
-
-cd %{docutilsname}
-LD_LIBRARY_PATH=$PYPATH $PYPATH/python setup.py install --root="$RPM_BUILD_ROOT"
-cd -
-
-PATH=$PYPATH:$PATH LD_LIBRARY_PATH=$PYPATH make install DESTDIR=$RPM_BUILD_ROOT PREFIX=%{hgpyprefix} MANDIR=%{_mandir}
-mkdir -p $RPM_BUILD_ROOT%{_bindir}
-( cd $RPM_BUILD_ROOT%{_bindir}/ && ln -s ../..%{hgpyprefix}/bin/hg . )
-( cd $RPM_BUILD_ROOT%{_bindir}/ && ln -s ../..%{hgpyprefix}/bin/python2.? %{pythonhg} )
-
-%else
-
-make install DESTDIR=$RPM_BUILD_ROOT PREFIX=%{_prefix} MANDIR=%{_mandir}
-
-%endif
-
-install -m 755 contrib/chg/chg $RPM_BUILD_ROOT%{_bindir}/
-install -m 755 contrib/hgk $RPM_BUILD_ROOT%{_bindir}/
-install -m 755 contrib/hg-ssh $RPM_BUILD_ROOT%{_bindir}/
-
-bash_completion_dir=$RPM_BUILD_ROOT%{_sysconfdir}/bash_completion.d
-mkdir -p $bash_completion_dir
-install -m 644 contrib/bash_completion $bash_completion_dir/mercurial.sh
-
-zsh_completion_dir=$RPM_BUILD_ROOT%{_datadir}/zsh/site-functions
-mkdir -p $zsh_completion_dir
-install -m 644 contrib/zsh_completion $zsh_completion_dir/_mercurial
-
-mkdir -p $RPM_BUILD_ROOT%{emacs_lispdir}
-install -m 644 contrib/mercurial.el $RPM_BUILD_ROOT%{emacs_lispdir}/
-install -m 644 contrib/mq.el $RPM_BUILD_ROOT%{emacs_lispdir}/
-
-mkdir -p $RPM_BUILD_ROOT/%{_sysconfdir}/mercurial/hgrc.d
-
-%clean
-rm -rf $RPM_BUILD_ROOT
-
-%files
-%defattr(-,root,root,-)
-%doc CONTRIBUTORS COPYING doc/README doc/hg*.txt doc/hg*.html *.cgi contrib/*.fcgi
-%doc %attr(644,root,root) %{_mandir}/man?/hg*
-%doc %attr(644,root,root) contrib/*.svg
-%dir %{_datadir}/zsh/
-%dir %{_datadir}/zsh/site-functions/
-%{_datadir}/zsh/site-functions/_mercurial
-%dir %{_datadir}/emacs/site-lisp/
-%{_datadir}/emacs/site-lisp/mercurial.el
-%{_datadir}/emacs/site-lisp/mq.el
-%{_bindir}/hg
-%{_bindir}/chg
-%{_bindir}/hgk
-%{_bindir}/hg-ssh
-%dir %{_sysconfdir}/bash_completion.d/
-%config(noreplace) %{_sysconfdir}/bash_completion.d/mercurial.sh
-%dir %{_sysconfdir}/mercurial
-%dir %{_sysconfdir}/mercurial/hgrc.d
-%if "%{?withpython}"
-%{_bindir}/%{pythonhg}
-%{hgpyprefix}
-%else
-%{_libdir}/python%{pythonver}/site-packages/%{name}-*-py%{pythonver}.egg-info
-%{_libdir}/python%{pythonver}/site-packages/%{name}
-%{_libdir}/python%{pythonver}/site-packages/hgext
-%{_libdir}/python%{pythonver}/site-packages/hgext3rd
-%{_libdir}/python%{pythonver}/site-packages/hgdemandimport
-%endif
--- a/contrib/packagelib.sh Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,38 +0,0 @@
-# Extract version number into 4 parts, some of which may be empty:
-#
-# version: the numeric part of the most recent tag. Will always look like 1.3.
-#
-# type: if an rc build, "rc", otherwise empty
-#
-# distance: the distance from the nearest tag, or empty if built from a tag
-#
-# node: the node|short hg was built from, or empty if built from a tag
-gethgversion() {
- export HGRCPATH=
- export HGPLAIN=
-
- make cleanbutpackages
- make local PURE=--pure
- HG="$PWD/hg"
-
- "$HG" version > /dev/null || { echo 'abort: hg version failed!'; exit 1 ; }
-
- hgversion=`LANGUAGE=C "$HG" version | sed -ne 's/.*(version \(.*\))$/\1/p'`
-
- if echo $hgversion | grep + > /dev/null 2>&1 ; then
- tmp=`echo $hgversion | cut -d+ -f 2`
- hgversion=`echo $hgversion | cut -d+ -f 1`
- distance=`echo $tmp | cut -d- -f 1`
- node=`echo $tmp | cut -d- -f 2`
- else
- distance=''
- node=''
- fi
- if echo $hgversion | grep -- '-' > /dev/null 2>&1; then
- version=`echo $hgversion | cut -d- -f1`
- type=`echo $hgversion | cut -d- -f2`
- else
- version=$hgversion
- type=''
- fi
-}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/Makefile Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,167 @@
+$(eval HGROOT := $(shell cd ../..; pwd))
+
+help:
+ @echo 'Packaging Make Targets'
+ @echo ''
+ @echo 'docker-centos{5, 6, 7}'
+ @echo ' Build an RPM for a specific CentOS version using Docker.'
+ @echo ''
+ @echo 'docker-debian-{jessie, stretch}'
+ @echo ' Build Debian packages specific to a Debian distro using Docker.'
+ @echo ''
+ @echo 'docker-fedora{20, 21}'
+ @echo ' Build an RPM for a specific Fedora version using Docker.'
+ @echo ''
+ @echo 'docker-ubuntu-{trusty, xenial, artful, bionic}'
+ @echo ' Build Debian package specific to an Ubuntu distro using Docker.'
+ @echo ''
+ @echo 'docker-ubuntu-{trusty, xenial, artful, bionic}-ppa'
+ @echo ' Build a source-only Debian package specific to an Ubuntu distro'
+ @echo ' using Docker.'
+ @echo ''
+ @echo 'linux-wheels'
+ @echo ' Build Linux manylinux wheels using Docker.'
+ @echo ''
+ @echo 'linux-wheels-{x86_64, i686}'
+ @echo ' Build Linux manylinux wheels for a specific architecture using Docker'
+ @echo ''
+ @echo 'deb'
+ @echo ' Build a Debian package locally targeting the current system'
+ @echo ''
+ @echo 'ppa'
+ @echo ' Build a Debian source package locally targeting the current system'
+ @echo ''
+ @echo 'centos{5, 6, 7}'
+ @echo ' Build an RPM for a specific CentOS version locally'
+ @echo ''
+ @echo 'fedora{20, 21}'
+ @echo ' Build an RPM for a specific Fedora version locally'
+
+.PHONY: help
+
+.PHONY: deb
+deb:
+ ./builddeb
+
+.PHONY: ppa
+ppa:
+ ./builddeb --source-only
+
+docker/debian-%: docker/debian.template
+ sed "s/__CODENAME__/$*/" $< > $@
+
+.PHONY: docker-debian-jessie
+docker-debian-jessie: docker/debian-jessie
+ ./dockerdeb debian jessie
+
+.PHONY: docker-debian-stretch
+docker-debian-stretch: docker/debian-stretch
+ ./dockerdeb debian stretch
+
+docker/ubuntu-%: docker/ubuntu.template
+ sed "s/__CODENAME__/$*/" $< > $@
+
+.PHONY: docker-ubuntu-trusty
+docker-ubuntu-trusty: docker/ubuntu-trusty
+ ./dockerdeb ubuntu trusty
+
+.PHONY: docker-ubuntu-trusty-ppa
+docker-ubuntu-trusty-ppa: docker/ubuntu-trusty
+ ./dockerdeb ubuntu trusty --source-only
+
+.PHONY: docker-ubuntu-xenial
+docker-ubuntu-xenial: docker/ubuntu-xenial
+ ./dockerdeb ubuntu xenial
+
+.PHONY: docker-ubuntu-xenial-ppa
+docker-ubuntu-xenial-ppa: docker/ubuntu-xenial
+ ./dockerdeb ubuntu xenial --source-only
+
+.PHONY: docker-ubuntu-artful
+docker-ubuntu-artful: docker/ubuntu-artful
+ ./dockerdeb ubuntu artful
+
+.PHONY: docker-ubuntu-artful-ppa
+docker-ubuntu-artful-ppa: docker/ubuntu-artful
+ ./dockerdeb ubuntu artful --source-only
+
+.PHONY: docker-ubuntu-bionic
+docker-ubuntu-bionic: docker/ubuntu-bionic
+ ./dockerdeb ubuntu bionic
+
+.PHONY: docker-ubuntu-bionic-ppa
+docker-ubuntu-bionic-ppa: docker/ubuntu-bionic
+ ./dockerdeb ubuntu bionic --source-only
+
+.PHONY: fedora20
+fedora20:
+ mkdir -p $(HGROOT)/packages/fedora20
+ ./buildrpm
+ cp $(HGROOT)/rpmbuild/RPMS/*/* $(HGROOT)/packages/fedora20
+ cp $(HGROOT)/rpmbuild/SRPMS/* $(HGROOT)/packages/fedora20
+ rm -rf $(HGROOT)/rpmbuild
+
+.PHONY: docker-fedora20
+docker-fedora20:
+ mkdir -p $(HGROOT)/packages/fedora20
+ ./dockerrpm fedora20
+
+.PHONY: fedora21
+fedora21:
+ mkdir -p $(HGROOT)/packages/fedora21
+ ./buildrpm
+ cp $(HGROOT)/rpmbuild/RPMS/*/* $(HGROOT)/packages/fedora21
+ cp $(HGROOT)/rpmbuild/SRPMS/* $(HGROOT)/packages/fedora21
+ rm -rf $(HGROOT)/rpmbuild
+
+.PHONY: docker-fedora21
+docker-fedora21:
+ mkdir -p $(HGROOT)/packages/fedora21
+ ./dockerrpm fedora21
+
+.PHONY: centos5
+centos5:
+ mkdir -p $(HGROOT)/packages/centos5
+ ./buildrpm --withpython
+ cp $(HGROOT)/rpmbuild/RPMS/*/* $(HGROOT)/packages/centos5
+ cp $(HGROOT)/rpmbuild/SRPMS/* $(HGROOT)/packages/centos5
+
+.PHONY: docker-centos5
+docker-centos5:
+ mkdir -p $(HGROOT)/packages/centos5
+ ./dockerrpm centos5 --withpython
+
+.PHONY: centos6
+centos6:
+ mkdir -p $(HGROOT)/packages/centos6
+ ./buildrpm --withpython
+ cp $(HGROOT)/rpmbuild/RPMS/*/* $(HGROOT)/packages/centos6
+ cp $(HGROOT)/rpmbuild/SRPMS/* $(HGROOT)/packages/centos6
+
+.PHONY: docker-centos6
+docker-centos6:
+ mkdir -p $(HGROOT)/packages/centos6
+ ./dockerrpm centos6 --withpython
+
+.PHONY: centos7
+centos7:
+ mkdir -p $(HGROOT)/packages/centos7
+ ./buildrpm
+ cp $(HGROOT)/rpmbuild/RPMS/*/* $(HGROOT)/packages/centos7
+ cp $(HGROOT)/rpmbuild/SRPMS/* $(HGROOT)/packages/centos7
+
+.PHONY: docker-centos7
+docker-centos7:
+ mkdir -p $(HGROOT)/packages/centos7
+ ./dockerrpm centos7
+
+.PHONY: linux-wheels
+linux-wheels: linux-wheels-x86_64 linux-wheels-i686
+
+.PHONY: linux-wheels-x86_64
+linux-wheels-x86_64:
+ docker run -e "HGTEST_JOBS=$(shell nproc)" --rm -ti -v `pwd`/../..:/src quay.io/pypa/manylinux1_x86_64 /src/contrib/packaging/build-linux-wheels.sh
+
+.PHONY: linux-wheels-i686
+linux-wheels-i686:
+ docker run -e "HGTEST_JOBS=$(shell nproc)" --rm -ti -v `pwd`/../..:/src quay.io/pypa/manylinux1_i686 linux32 /src/contrib/packaging/build-linux-wheels.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/build-linux-wheels.sh Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,34 @@
+#!/bin/bash
+# This file is directly inspired by
+# https://github.com/pypa/python-manylinux-demo/blob/master/travis/build-wheels.sh
+set -e -x
+
+PYTHON_TARGETS=$(ls -d /opt/python/cp27*/bin)
+
+# Create a user for the tests
+useradd hgbuilder
+
+# Bypass uid/gid problems
+cp -R /src /io && chown -R hgbuilder:hgbuilder /io
+
+# Compile wheels for Python 2.X
+for PYBIN in $PYTHON_TARGETS; do
+ "${PYBIN}/pip" wheel /io/ -w wheelhouse/
+done
+
+# Bundle external shared libraries into the wheels with
+# auditwheel (https://github.com/pypa/auditwheel) repair.
+# It also fixes the ABI tag on the wheel making it pip installable.
+for whl in wheelhouse/*.whl; do
+ auditwheel repair "$whl" -w /src/wheelhouse/
+done
+
+# Install packages and run the tests for all Python versions
+cd /io/tests/
+
+for PYBIN in $PYTHON_TARGETS; do
+ # Install mercurial wheel as root
+ "${PYBIN}/pip" install mercurial --no-index -f /src/wheelhouse
+ # But run tests as hgbuilder user (non-root)
+ su hgbuilder -c "\"${PYBIN}/python\" /io/tests/run-tests.py --with-hg=\"${PYBIN}/hg\" --blacklist=/io/contrib/packaging/linux-wheel-centos5-blacklist"
+done
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/builddeb Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,106 @@
+#!/bin/sh -e
+#
+# Build a Mercurial debian package from the current repo
+#
+# Tested on Jessie (stable as of original script authoring.)
+
+. $(dirname $0)/packagelib.sh
+
+ROOTDIR=$(cd $(dirname $0)/../.. > /dev/null; pwd)
+
+BUILD=1
+CLEANUP=1
+DISTID=`(lsb_release -is 2> /dev/null | tr '[:upper:]' '[:lower:]') || echo debian`
+CODENAME=`lsb_release -cs 2> /dev/null || echo unknown`
+DEBFLAGS=-b
+while [ "$1" ]; do
+ case "$1" in
+ --distid )
+ shift
+ DISTID="$1"
+ shift
+ ;;
+ --codename )
+ shift
+ CODENAME="$1"
+ shift
+ ;;
+ --cleanup )
+ shift
+ BUILD=
+ ;;
+ --build )
+ shift
+ CLEANUP=
+ ;;
+ --source-only )
+ shift
+ DEBFLAGS=-S
+ ;;
+ * )
+ echo "Invalid parameter $1!" 1>&2
+ exit 1
+ ;;
+ esac
+done
+
+trap "if [ '$CLEANUP' ] ; then rm -r '$PWD/debian' ; fi" EXIT
+
+set -u
+
+if [ ! -d .hg ]; then
+ echo 'You are not inside a Mercurial repository!' 1>&2
+ exit 1
+fi
+
+gethgversion
+debver="$version"
+if [ -n "$type" ] ; then
+ debver="$debver~$type"
+fi
+if [ -n "$distance" ] ; then
+ debver="$debver+$distance-$CODENAME-$node"
+elif [ "$DEBFLAGS" = "-S" ] ; then
+ # for building a ppa (--source-only) for a release (distance == 0), we need
+ # to version the distroseries so that we can upload to launchpad
+ debver="$debver~${CODENAME}1"
+fi
+
+control=debian/control
+changelog=debian/changelog
+
+if [ "$BUILD" ]; then
+ if [ -d debian ] ; then
+ echo "Error! debian control directory already exists!"
+ exit 1
+ fi
+
+ cp -r "$ROOTDIR"/contrib/packaging/debian debian
+
+ sed -i.tmp "s/__VERSION__/$debver/" $changelog
+ sed -i.tmp "s/__DATE__/$(date --rfc-2822)/" $changelog
+ sed -i.tmp "s/__CODENAME__/$CODENAME/" $changelog
+ rm $changelog.tmp
+
+ # remove the node from the version string
+ SRCFILE="mercurial_$(echo $debver | sed "s,-$node,,").orig.tar.gz"
+ "$ROOTDIR/hg" archive $SRCFILE
+ mv $SRCFILE ..
+ debuild -us -uc -i -I $DEBFLAGS
+ if [ $? != 0 ]; then
+ echo 'debuild failed!'
+ exit 1
+ fi
+
+fi
+if [ "$CLEANUP" ] ; then
+ echo
+ OUTPUTDIR=${OUTPUTDIR:=packages/$DISTID-$CODENAME}
+ mkdir -p "$OUTPUTDIR"
+ find ../mercurial*.deb ../mercurial_*.build ../mercurial_*.changes \
+ ../mercurial*.dsc ../mercurial*.gz \
+ -type f -newer $control -print0 2>/dev/null | \
+ xargs -Inarf -0 mv narf "$OUTPUTDIR"
+ echo "Built packages for $debver:"
+ find "$OUTPUTDIR" -type f -newer $control -name '*.deb'
+fi
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/buildrpm Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,162 @@
+#!/bin/bash -e
+#
+# Build a Mercurial RPM from the current repo
+#
+# Tested on
+# - Fedora 20
+# - CentOS 5
+# - CentOS 6
+
+. $(dirname $0)/packagelib.sh
+
+BUILD=1
+RPMBUILDDIR="$PWD/rpmbuild"
+
+while [ "$1" ]; do
+ case "$1" in
+ --prepare )
+ shift
+ BUILD=
+ ;;
+ --withpython | --with-python)
+ shift
+ PYTHONVER=2.7.14
+ PYTHONMD5=cee2e4b33ad3750da77b2e85f2f8b724
+ ;;
+ --rpmbuilddir )
+ shift
+ RPMBUILDDIR="$1"
+ shift
+ ;;
+ * )
+ echo "Invalid parameter $1!" 1>&2
+ exit 1
+ ;;
+ esac
+done
+
+cd "`dirname $0`/../.."
+
+specfile=$PWD/contrib/packaging/mercurial.spec
+if [ ! -f $specfile ]; then
+ echo "Cannot find $specfile!" 1>&2
+ exit 1
+fi
+
+if [ ! -d .hg ]; then
+ echo 'You are not inside a Mercurial repository!' 1>&2
+ exit 1
+fi
+
+gethgversion
+
+# TODO: handle distance/node set, and type set
+
+if [ -z "$type" ] ; then
+ release=1
+else
+ release=0.9_$type
+fi
+
+if [ -n "$distance" ] ; then
+ release=$release+${distance}_$node
+fi
+
+if [ "$PYTHONVER" ]; then
+ release=$release+$PYTHONVER
+ RPMPYTHONVER=$PYTHONVER
+else
+ RPMPYTHONVER=%{nil}
+fi
+
+mkdir -p $RPMBUILDDIR/{SOURCES,BUILD,SRPMS,RPMS}
+$HG archive -t tgz $RPMBUILDDIR/SOURCES/mercurial-$version-$release.tar.gz
+if [ "$PYTHONVER" ]; then
+(
+ mkdir -p build
+ cd build
+ PYTHON_SRCFILE=Python-$PYTHONVER.tgz
+ [ -f $PYTHON_SRCFILE ] || curl -Lo $PYTHON_SRCFILE http://www.python.org/ftp/python/$PYTHONVER/$PYTHON_SRCFILE
+ if [ "$PYTHONMD5" ]; then
+ echo "$PYTHONMD5 $PYTHON_SRCFILE" | md5sum -w -c
+ fi
+ ln -f $PYTHON_SRCFILE $RPMBUILDDIR/SOURCES/$PYTHON_SRCFILE
+
+ DOCUTILSVER=`sed -ne "s/^%global docutilsname docutils-//p" $specfile`
+ DOCUTILS_SRCFILE=docutils-$DOCUTILSVER.tar.gz
+ [ -f $DOCUTILS_SRCFILE ] || curl -Lo $DOCUTILS_SRCFILE http://downloads.sourceforge.net/project/docutils/docutils/$DOCUTILSVER/$DOCUTILS_SRCFILE
+ DOCUTILSMD5=`sed -ne "s/^%global docutilsmd5 //p" $specfile`
+ if [ "$DOCUTILSMD5" ]; then
+ echo "$DOCUTILSMD5 $DOCUTILS_SRCFILE" | md5sum -w -c
+ fi
+ ln -f $DOCUTILS_SRCFILE $RPMBUILDDIR/SOURCES/$DOCUTILS_SRCFILE
+)
+fi
+
+mkdir -p $RPMBUILDDIR/SPECS
+rpmspec=$RPMBUILDDIR/SPECS/mercurial.spec
+
+sed -e "s,^Version:.*,Version: $version," \
+ -e "s,^Release:.*,Release: $release," \
+ $specfile > $rpmspec
+
+echo >> $rpmspec
+echo "%changelog" >> $rpmspec
+
+if echo $version | grep '+' > /dev/null 2>&1; then
+ latesttag="`echo $version | sed -e 's/+.*//'`"
+ $HG log -r .:"$latesttag" -fM \
+ --template '{date|hgdate}\t{author}\t{desc|firstline}\n' | python -c '
+import sys, time
+
+def datestr(date, format):
+ return time.strftime(format, time.gmtime(float(date[0]) - date[1]))
+
+changelog = []
+for l in sys.stdin.readlines():
+ tok = l.split("\t")
+ hgdate = tuple(int(v) for v in tok[0].split())
+ changelog.append((datestr(hgdate, "%F"), tok[1], hgdate, tok[2]))
+prevtitle = ""
+for l in sorted(changelog, reverse=True):
+ title = "* %s %s" % (datestr(l[2], "%a %b %d %Y"), l[1])
+ if prevtitle != title:
+ prevtitle = title
+ print
+ print title
+ print "- %s" % l[3].strip()
+' >> $rpmspec
+
+else
+
+ $HG log \
+ --template '{date|hgdate}\t{author}\t{desc|firstline}\n' \
+ .hgtags | python -c '
+import sys, time
+
+def datestr(date, format):
+ return time.strftime(format, time.gmtime(float(date[0]) - date[1]))
+
+for l in sys.stdin.readlines():
+ tok = l.split("\t")
+ hgdate = tuple(int(v) for v in tok[0].split())
+ print "* %s %s\n- %s" % (datestr(hgdate, "%a %b %d %Y"), tok[1], tok[2])
+' >> $rpmspec
+
+fi
+
+sed -i \
+ -e "s/^%define withpython.*$/%define withpython $RPMPYTHONVER/" \
+ $rpmspec
+
+if [ "$BUILD" ]; then
+ rpmbuild --define "_topdir $RPMBUILDDIR" -ba $rpmspec --clean
+ if [ $? = 0 ]; then
+ echo
+ echo "Built packages for $version-$release:"
+ find $RPMBUILDDIR/*RPMS/ -type f -newer $rpmspec
+ fi
+else
+ echo "Prepared sources for $version-$release $rpmspec are in $RPMBUILDDIR/SOURCES/ - use like:"
+ echo "rpmbuild --define '_topdir $RPMBUILDDIR' -ba $rpmspec --clean"
+fi
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/debian/cacerts.rc Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,5 @@
+# This config file points Mercurial at the system-wide certificate
+# store from the ca-certificates package.
+
+[web]
+cacerts = /etc/ssl/certs/ca-certificates.crt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/debian/changelog Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,5 @@
+mercurial (__VERSION__) __CODENAME__; urgency=medium
+
+ * Automated build performed by upstream.
+
+ -- Mercurial Devel <mercurial-devel@mercurial-scm.org> __DATE__
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/debian/compat Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,1 @@
+9
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/debian/control Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,54 @@
+Source: mercurial
+Section: vcs
+Priority: optional
+Maintainer: Mercurial Developers <mercurial-devel@mercurial-scm.org>
+Build-Depends:
+ debhelper (>= 9),
+ dh-python,
+ less,
+ netbase,
+ python-all,
+ python-all-dev,
+ python-docutils,
+ unzip,
+ zip
+Standards-Version: 3.9.4
+X-Python-Version: >= 2.7
+
+Package: mercurial
+Depends:
+ python,
+ ${shlibs:Depends},
+ ${misc:Depends},
+ ${python:Depends},
+ mercurial-common (= ${source:Version})
+Architecture: any
+Description: fast, easy to use, distributed revision control tool.
+ Mercurial is a fast, lightweight Source Control Management system designed
+ for efficient handling of very large distributed projects.
+ .
+ Its features include:
+ * O(1) delta-compressed file storage and retrieval scheme
+ * Complete cross-indexing of files and changesets for efficient exploration
+ of project history
+ * Robust SHA1-based integrity checking and append-only storage model
+ * Decentralized development model with arbitrary merging between trees
+ * Easy-to-use command-line interface
+ * Integrated stand-alone web interface
+ * Small Python codebase
+
+Package: mercurial-common
+Architecture: all
+Depends:
+ ${misc:Depends},
+ ${python:Depends},
+Recommends: mercurial (= ${source:Version}), ca-certificates
+Suggests: wish
+Breaks: mercurial (<< ${source:Version})
+Replaces: mercurial (<< 2.6.3)
+Description: easy-to-use, scalable distributed version control system (common files)
+ Mercurial is a fast, lightweight Source Control Management system designed
+ for efficient handling of very large distributed projects.
+ .
+ This package contains the architecture independent components of Mercurial,
+ and is generally useless without the mercurial package.
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/debian/copyright Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,27 @@
+Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
+Upstream-Name: mercurial
+Source: https://www.mercurial-scm.org/
+
+Files: *
+Copyright: 2005-2018, Matt Mackall <mpm@selenic.com> and others.
+License: GPL-2+
+ This program is free software; you can redistribute it
+ and/or modify it under the terms of the GNU General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later
+ version.
+ .
+ This program is distributed in the hope that it will be
+ useful, but WITHOUT ANY WARRANTY; without even the implied
+ warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ PURPOSE. See the GNU General Public License for more
+ details.
+ .
+ You should have received a copy of the GNU General Public
+ License along with this package; if not, write to the Free
+ Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ Boston, MA 02110-1301 USA
+ .
+ On Debian systems, the full text of the GNU General Public
+ License version 2 can be found in the file
+ `/usr/share/common-licenses/GPL-2'.
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/debian/default-tools.rc Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,5 @@
+[ui]
+editor = sensible-editor
+
+[pager]
+pager = sensible-pager
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/debian/hgkpath.rc Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,2 @@
+[hgk]
+path = /usr/share/mercurial/hgk
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/debian/rules Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,44 @@
+#!/usr/bin/make -f
+# Uncomment this to turn on verbose mode.
+# export DH_VERBOSE=1
+
+CPUS=$(shell cat /proc/cpuinfo | grep -E ^processor | wc -l)
+
+%:
+ dh $@ --with python2
+
+override_dh_auto_test:
+ http_proxy='' dh_auto_test -- TESTFLAGS="-j$(CPUS)"
+
+override_dh_python2:
+ dh_python2
+ find debian/mercurial/usr/share -type d -empty -delete
+
+override_dh_install:
+ python$(PYVERS) setup.py install --root "$(CURDIR)"/debian/mercurial --install-layout=deb
+ # chg
+ make -C contrib/chg \
+ DESTDIR="$(CURDIR)"/debian/mercurial \
+ PREFIX=/usr \
+ clean install
+ # remove arch-independent python stuff
+ find "$(CURDIR)"/debian/mercurial/usr/lib \
+ ! -name '*.so' ! -type d -delete , \
+ -type d -empty -delete
+ python$(PYVERS) setup.py install --root "$(CURDIR)/debian/mercurial-common" --install-layout=deb
+ make install-doc PREFIX="$(CURDIR)"/debian/mercurial-common/usr
+ # remove arch-dependent python stuff
+ find "$(CURDIR)"/debian/mercurial-common/usr/lib \
+ -name '*.so' ! -type d -delete , \
+ -type d -empty -delete
+ cp contrib/hg-ssh "$(CURDIR)"/debian/mercurial-common/usr/bin
+ mkdir -p "$(CURDIR)"/debian/mercurial-common/usr/share/mercurial
+ cp contrib/hgk "$(CURDIR)"/debian/mercurial-common/usr/share/mercurial
+ mkdir -p "$(CURDIR)"/debian/mercurial-common/etc/mercurial/hgrc.d/
+ cp contrib/packaging/debian/*.rc "$(CURDIR)"/debian/mercurial-common/etc/mercurial/hgrc.d/
+ # completions
+ mkdir -p "$(CURDIR)"/debian/mercurial-common/usr/share/bash-completion/completions
+ cp contrib/bash_completion "$(CURDIR)"/debian/mercurial-common/usr/share/bash-completion/completions/hg
+ mkdir -p "$(CURDIR)"/debian/mercurial-common/usr/share/zsh/vendor-completions
+ cp contrib/zsh_completion "$(CURDIR)"/debian/mercurial-common/usr/share/zsh/vendor-completions/_hg
+ rm "$(CURDIR)"/debian/mercurial-common/usr/bin/hg
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/docker/centos5 Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,23 @@
+FROM centos:centos5
+RUN \
+ sed -i 's/^mirrorlist/#mirrorlist/' /etc/yum.repos.d/*.repo && \
+ sed -i 's/^#\(baseurl=\)http:\/\/mirror.centos.org\/centos/\1http:\/\/vault.centos.org/' /etc/yum.repos.d/*.repo && \
+ sed -i 's/\$releasever/5.11/' /etc/yum.repos.d/*.repo
+
+RUN yum install -y \
+ gcc \
+ gettext \
+ make \
+ python-devel \
+ python-docutils \
+ rpm-build \
+ tar
+
+# For creating repo meta data
+RUN yum install -y \
+ bzip2-devel \
+ createrepo \
+ ncurses-devel \
+ openssl-devel \
+ readline-devel \
+ zlib-devel
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/docker/centos6 Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,20 @@
+FROM centos:centos6
+RUN yum install -y \
+ gcc \
+ gettext \
+ make \
+ python-devel \
+ python-docutils \
+ rpm-build \
+ tar
+
+# For creating repo meta data
+RUN yum install -y createrepo
+
+# For python
+RUN yum install -y \
+ bzip2-devel \
+ ncurses-devel \
+ openssl-devel \
+ readline-devel \
+ zlib-devel
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/docker/centos7 Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,12 @@
+FROM centos:centos7
+RUN yum install -y \
+ gcc \
+ gettext \
+ make \
+ python-devel \
+ python-docutils \
+ rpm-build \
+ tar
+
+# For creating repo meta data
+RUN yum install -y createrepo
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/docker/debian.template Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,12 @@
+FROM debian:__CODENAME__
+RUN apt-get update && apt-get install -y \
+ build-essential \
+ debhelper \
+ devscripts \
+ dh-python \
+ less \
+ python \
+ python-all-dev \
+ python-docutils \
+ unzip \
+ zip
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/docker/fedora20 Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,11 @@
+FROM fedora:20
+RUN yum install -y \
+ gcc \
+ gettext \
+ make \
+ python-devel \
+ python-docutils \
+ rpm-build
+
+# For creating repo meta data
+RUN yum install -y createrepo
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/docker/fedora21 Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,11 @@
+FROM fedora:21
+RUN yum install -y \
+ gcc \
+ gettext \
+ make \
+ python-devel \
+ python-docutils \
+ rpm-build
+
+# For creating repo meta data
+RUN yum install -y createrepo
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/docker/ubuntu.template Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,12 @@
+FROM ubuntu:__CODENAME__
+RUN apt-get update && apt-get install -y \
+ build-essential \
+ debhelper \
+ devscripts \
+ dh-python \
+ less \
+ python \
+ python-all-dev \
+ python-docutils \
+ unzip \
+ zip
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/dockerdeb Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,35 @@
+#!/bin/bash -eu
+
+. $(dirname $0)/dockerlib.sh
+. $(dirname $0)/packagelib.sh
+
+BUILDDIR=$(dirname $0)
+export ROOTDIR=$(cd $BUILDDIR/../.. > /dev/null; pwd)
+
+checkdocker
+
+DISTID="$1"
+CODENAME="$2"
+PLATFORM="$1-$2"
+shift; shift # extra params are passed to build process
+
+OUTPUTDIR=${OUTPUTDIR:=$ROOTDIR/packages/$PLATFORM}
+
+initcontainer $PLATFORM
+
+# debuild only appears to be able to save built debs etc to .., so we
+# have to share the .. of the current directory with the docker
+# container and hope it's writable. Whee.
+dn=$(basename $ROOTDIR)
+
+if [ $(uname) = "Darwin" ] ; then
+ $DOCKER run -u $DBUILDUSER --rm -v $PWD/..:/mnt $CONTAINER \
+ sh -c "cd /mnt/$dn && make clean && make local"
+fi
+$DOCKER run -u $DBUILDUSER --rm -v $ROOTDIR/..:/mnt $CONTAINER \
+ sh -c "cd /mnt/$dn && DEB_BUILD_OPTIONS='${DEB_BUILD_OPTIONS:=}' contrib/packaging/builddeb --build --distid $DISTID --codename $CODENAME $@"
+contrib/packaging/builddeb --cleanup --distid $DISTID --codename $CODENAME
+if [ $(uname) = "Darwin" ] ; then
+ $DOCKER run -u $DBUILDUSER --rm -v $PWD/..:/mnt $CONTAINER \
+ sh -c "cd /mnt/$dn && make clean"
+fi
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/dockerlib.sh Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,42 @@
+#!/bin/sh -eu
+
+# This function exists to set up the DOCKER variable and verify that
+# it's the binary we expect. It also verifies that the docker service
+# is running on the system and we can talk to it.
+function checkdocker() {
+ if which docker.io >> /dev/null 2>&1 ; then
+ DOCKER=docker.io
+ elif which docker >> /dev/null 2>&1 ; then
+ DOCKER=docker
+ else
+ echo "Error: docker must be installed"
+ exit 1
+ fi
+
+ $DOCKER -h 2> /dev/null | grep -q Jansens && { echo "Error: $DOCKER is the Docking System Tray - install docker.io instead"; exit 1; }
+ $DOCKER version | grep -Eq "^Client( version)?:" || { echo "Error: unexpected output from \"$DOCKER version\""; exit 1; }
+ $DOCKER version | grep -Eq "^Server( version)?:" || { echo "Error: could not get docker server version - check it is running and your permissions"; exit 1; }
+}
+
+# Construct a container and leave its name in $CONTAINER for future use.
+function initcontainer() {
+ [ "$1" ] || { echo "Error: platform name must be specified"; exit 1; }
+
+ DFILE="$ROOTDIR/contrib/packaging/docker/$1"
+ [ -f "$DFILE" ] || { echo "Error: docker file $DFILE not found"; exit 1; }
+
+ CONTAINER="hg-dockerrpm-$1"
+ DBUILDUSER=build
+ (
+ cat $DFILE
+ if [ $(uname) = "Darwin" ] ; then
+ # The builder is using boot2docker on OS X, so we're going to
+ # *guess* the uid of the user inside the VM that is actually
+ # running docker. This is *very likely* to fail at some point.
+ echo RUN useradd $DBUILDUSER -u 1000
+ else
+ echo RUN groupadd $DBUILDUSER -g `id -g` -o
+ echo RUN useradd $DBUILDUSER -u `id -u` -g $DBUILDUSER -o
+ fi
+ ) | $DOCKER build --build-arg http_proxy --build-arg https_proxy --tag $CONTAINER -
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/dockerrpm Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,37 @@
+#!/bin/bash -e
+
+. $(dirname $0)/dockerlib.sh
+
+BUILDDIR=$(dirname $0)
+export ROOTDIR=$(cd $BUILDDIR/../..; pwd)
+
+checkdocker
+
+PLATFORM="$1"
+shift # extra params are passed to buildrpm
+
+initcontainer $PLATFORM
+
+RPMBUILDDIR=$ROOTDIR/packages/$PLATFORM
+$ROOTDIR/contrib/packaging/buildrpm --rpmbuilddir $RPMBUILDDIR --prepare $*
+
+DSHARED=/mnt/shared
+$DOCKER run -e http_proxy -e https_proxy -u $DBUILDUSER --rm -v $RPMBUILDDIR:$DSHARED $CONTAINER \
+ rpmbuild --define "_topdir $DSHARED" -ba $DSHARED/SPECS/mercurial.spec --clean
+
+$DOCKER run -e http_proxy -e https_proxy -u $DBUILDUSER --rm -v $RPMBUILDDIR:$DSHARED $CONTAINER \
+ createrepo $DSHARED
+
+cat << EOF > $RPMBUILDDIR/mercurial.repo
+# Place this file in /etc/yum.repos.d/mercurial.repo
+[mercurial]
+name=Mercurial packages for $PLATFORM
+# baseurl=file://$RPMBUILDDIR/
+baseurl=http://hg.example.com/build/$PLATFORM/
+skip_if_unavailable=True
+gpgcheck=0
+enabled=1
+EOF
+
+echo
+echo "Build complete - results can be found in $RPMBUILDDIR"
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/linux-wheel-centos5-blacklist Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,3 @@
+test-convert-git.t
+test-subrepo-git.t
+test-patchbomb-tls.t
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/macosx/Readme.html Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,37 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
+<!-- This is the second screen displayed during the install. -->
+<html>
+<head>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+ <meta http-equiv="Content-Style-Type" content="text/css">
+ <title>Read Me - Important Information</title>
+ <style type="text/css">
+ p.p1 {margin: 0.0px 0.0px 0.0px 0.0px; font: 14.0px Helvetica}
+ p.p2 {margin: 0.0px 0.0px 0.0px 0.0px; font: 12.0px Helvetica; min-height: 14.0px}
+ p.p3 {margin: 0.0px 0.0px 0.0px 0.0px; font: 12.0px Helvetica}
+ p.p4 {margin: 0.0px 0.0px 0.0px 0.0px; font: 12.0px Helvetica; color: #000fed}
+ span.s1 {text-decoration: underline}
+ span.s2 {font: 12.0px Courier}
+ </style>
+</head>
+<body>
+<p class="p1"><b>Before you install</b></p>
+<p class="p2"><br></p>
+<p class="p3">This is an OS X version of Mercurial that depends on the default Python installation.</p>
+<p class="p2"><br></p>
+<p class="p1"><b>After you install</b></p>
+<p class="p2"><br></p>
+<p class="p3">This package installs the <span class="s2">hg</span> executable as <span class="s2">/usr/local/bin/hg</span>. See <span class="s2">hg debuginstall</span> for more info on file locations.</p>
+<p class="p2"><br></p>
+<p class="p1"><b>Documentation</b></p>
+<p class="p2"><br></p>
+<p class="p3">Visit the <a href="https://mercurial-scm.org/">Mercurial web site and wiki</a></p>
+<p class="p2"><br></p>
+<p class="p3">There's also a free book, <a href="https://book.mercurial-scm.org/">Distributed revision control with Mercurial</a></p>
+<p class="p2"><br></p>
+<p class="p1"><b>Reporting problems</b></p>
+<p class="p2"><br></p>
+<p class="p3">If you run into any problems, please file a bug online:</p>
+<p class="p3"><a href="https://bz.mercurial-scm.org/">https://bz.mercurial-scm.org/</a></p>
+</body>
+</html>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/macosx/Welcome.html Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,20 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
+<!-- This is the first screen displayed during the install. -->
+<html>
+<head>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+ <meta http-equiv="Content-Style-Type" content="text/css">
+ <title></title>
+ <style type="text/css">
+ p.p1 {margin: 0.0px 0.0px 0.0px 0.0px; font: 14.0px Helvetica}
+ p.p2 {margin: 0.0px 0.0px 0.0px 0.0px; font: 12.0px Helvetica; min-height: 14.0px}
+ </style>
+</head>
+<body>
+<p class="p1">This is a prepackaged release of <a href="https://mercurial-scm.org/">Mercurial</a> for Mac OS X.</p>
+<p class="p2"><br></p>
+<br>
+<p>
+Please be sure to read the latest <a href="https://mercurial-scm.org/wiki/WhatsNew">release notes</a>.</p>
+</body>
+</html>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/macosx/distribution.xml Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="utf-8" standalone="no"?>
+<installer-gui-script minSpecVersion="1">
+ <title>Mercurial SCM</title>
+ <organization>org.mercurial-scm</organization>
+ <options customize="never" require-scripts="false" rootVolumeOnly="true" />
+ <welcome file="Welcome.html" mime-type="text/html" />
+ <license file="../../../COPYING" mime-type="text/plain" />
+ <readme file="Readme.html" mime-type="text/html" />
+ <pkg-ref id="org.mercurial-scm.mercurial"
+ version="0"
+ auth="root"
+ onConclusion="none">mercurial.pkg</pkg-ref>
+ <choices-outline>
+ <line choice="org.mercurial-scm.mercurial"/>
+ </choices-outline>
+ <choice id="org.mercurial-scm.mercurial" visible="false">
+ <pkg-ref id="org.mercurial-scm.mercurial"/>
+ </choice>
+</installer-gui-script>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/mercurial.spec Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,164 @@
+%global emacs_lispdir %{_datadir}/emacs/site-lisp
+
+%define withpython %{nil}
+
+%if "%{?withpython}"
+
+%global pythonver %{withpython}
+%global pythonname Python-%{withpython}
+%global docutilsname docutils-0.14
+%global docutilsmd5 c53768d63db3873b7d452833553469de
+%global pythonhg python-hg
+%global hgpyprefix /opt/%{pythonhg}
+# byte compilation will fail on some Python /test/ files
+%global _python_bytecompile_errors_terminate_build 0
+
+%else
+
+%global pythonver %(python -c 'import sys;print ".".join(map(str, sys.version_info[:2]))')
+
+%endif
+
+Summary: A fast, lightweight Source Control Management system
+Name: mercurial
+Version: snapshot
+Release: 0
+License: GPLv2+
+Group: Development/Tools
+URL: https://mercurial-scm.org/
+Source0: %{name}-%{version}-%{release}.tar.gz
+%if "%{?withpython}"
+Source1: %{pythonname}.tgz
+Source2: %{docutilsname}.tar.gz
+%endif
+BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root
+
+BuildRequires: make, gcc, gettext
+%if "%{?withpython}"
+BuildRequires: readline-devel, openssl-devel, ncurses-devel, zlib-devel, bzip2-devel
+%else
+BuildRequires: python >= 2.7, python-devel, python-docutils >= 0.5
+Requires: python >= 2.7
+%endif
+# The hgk extension uses the wish tcl interpreter, but we don't enforce it
+#Requires: tk
+
+%description
+Mercurial is a fast, lightweight source control management system designed
+for efficient handling of very large distributed projects.
+
+%prep
+
+%if "%{?withpython}"
+%setup -q -n mercurial-%{version}-%{release} -a1 -a2
+# despite the comments in cgi.py, we do this to prevent rpmdeps from picking /usr/local/bin/python up
+sed -i '1c#! /usr/bin/env python' %{pythonname}/Lib/cgi.py
+%else
+%setup -q -n mercurial-%{version}-%{release}
+%endif
+
+%build
+
+%if "%{?withpython}"
+
+PYPATH=$PWD/%{pythonname}
+cd $PYPATH
+./configure --prefix=%{hgpyprefix}
+make all %{?_smp_mflags}
+cd -
+
+cd %{docutilsname}
+LD_LIBRARY_PATH=$PYPATH $PYPATH/python setup.py build
+cd -
+
+# verify Python environment
+LD_LIBRARY_PATH=$PYPATH PYTHONPATH=$PWD/%{docutilsname} $PYPATH/python -c 'import sys, zlib, bz2, ssl, curses, readline'
+
+# set environment for make
+export PATH=$PYPATH:$PATH
+export LD_LIBRARY_PATH=$PYPATH
+export CFLAGS="-L $PYPATH"
+export PYTHONPATH=$PWD/%{docutilsname}
+
+%endif
+
+make all
+make -C contrib/chg
+
+%install
+rm -rf $RPM_BUILD_ROOT
+
+%if "%{?withpython}"
+
+PYPATH=$PWD/%{pythonname}
+cd $PYPATH
+make install DESTDIR=$RPM_BUILD_ROOT
+# these .a are not necessary and they are readonly and strip fails - kill them!
+rm -f %{buildroot}%{hgpyprefix}/lib/{,python2.*/config}/libpython2.*.a
+cd -
+
+cd %{docutilsname}
+LD_LIBRARY_PATH=$PYPATH $PYPATH/python setup.py install --root="$RPM_BUILD_ROOT"
+cd -
+
+PATH=$PYPATH:$PATH LD_LIBRARY_PATH=$PYPATH make install DESTDIR=$RPM_BUILD_ROOT PREFIX=%{hgpyprefix} MANDIR=%{_mandir}
+mkdir -p $RPM_BUILD_ROOT%{_bindir}
+( cd $RPM_BUILD_ROOT%{_bindir}/ && ln -s ../..%{hgpyprefix}/bin/hg . )
+( cd $RPM_BUILD_ROOT%{_bindir}/ && ln -s ../..%{hgpyprefix}/bin/python2.? %{pythonhg} )
+
+%else
+
+make install DESTDIR=$RPM_BUILD_ROOT PREFIX=%{_prefix} MANDIR=%{_mandir}
+
+%endif
+
+install -m 755 contrib/chg/chg $RPM_BUILD_ROOT%{_bindir}/
+install -m 755 contrib/hgk $RPM_BUILD_ROOT%{_bindir}/
+install -m 755 contrib/hg-ssh $RPM_BUILD_ROOT%{_bindir}/
+
+bash_completion_dir=$RPM_BUILD_ROOT%{_sysconfdir}/bash_completion.d
+mkdir -p $bash_completion_dir
+install -m 644 contrib/bash_completion $bash_completion_dir/mercurial.sh
+
+zsh_completion_dir=$RPM_BUILD_ROOT%{_datadir}/zsh/site-functions
+mkdir -p $zsh_completion_dir
+install -m 644 contrib/zsh_completion $zsh_completion_dir/_mercurial
+
+mkdir -p $RPM_BUILD_ROOT%{emacs_lispdir}
+install -m 644 contrib/mercurial.el $RPM_BUILD_ROOT%{emacs_lispdir}/
+install -m 644 contrib/mq.el $RPM_BUILD_ROOT%{emacs_lispdir}/
+
+mkdir -p $RPM_BUILD_ROOT/%{_sysconfdir}/mercurial/hgrc.d
+
+%clean
+rm -rf $RPM_BUILD_ROOT
+
+%files
+%defattr(-,root,root,-)
+%doc CONTRIBUTORS COPYING doc/README doc/hg*.txt doc/hg*.html *.cgi contrib/*.fcgi
+%doc %attr(644,root,root) %{_mandir}/man?/hg*
+%doc %attr(644,root,root) contrib/*.svg
+%dir %{_datadir}/zsh/
+%dir %{_datadir}/zsh/site-functions/
+%{_datadir}/zsh/site-functions/_mercurial
+%dir %{_datadir}/emacs/site-lisp/
+%{_datadir}/emacs/site-lisp/mercurial.el
+%{_datadir}/emacs/site-lisp/mq.el
+%{_bindir}/hg
+%{_bindir}/chg
+%{_bindir}/hgk
+%{_bindir}/hg-ssh
+%dir %{_sysconfdir}/bash_completion.d/
+%config(noreplace) %{_sysconfdir}/bash_completion.d/mercurial.sh
+%dir %{_sysconfdir}/mercurial
+%dir %{_sysconfdir}/mercurial/hgrc.d
+%if "%{?withpython}"
+%{_bindir}/%{pythonhg}
+%{hgpyprefix}
+%else
+%{_libdir}/python%{pythonver}/site-packages/%{name}-*-py%{pythonver}.egg-info
+%{_libdir}/python%{pythonver}/site-packages/%{name}
+%{_libdir}/python%{pythonver}/site-packages/hgext
+%{_libdir}/python%{pythonver}/site-packages/hgext3rd
+%{_libdir}/python%{pythonver}/site-packages/hgdemandimport
+%endif
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/packaging/packagelib.sh Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,38 @@
+# Extract version number into 4 parts, some of which may be empty:
+#
+# version: the numeric part of the most recent tag. Will always look like 1.3.
+#
+# type: if an rc build, "rc", otherwise empty
+#
+# distance: the distance from the nearest tag, or empty if built from a tag
+#
+# node: the node|short hg was built from, or empty if built from a tag
+gethgversion() {
+ export HGRCPATH=
+ export HGPLAIN=
+
+ make cleanbutpackages
+ make local PURE=--pure
+ HG="$PWD/hg"
+
+ "$HG" version > /dev/null || { echo 'abort: hg version failed!'; exit 1 ; }
+
+ hgversion=`LANGUAGE=C "$HG" version | sed -ne 's/.*(version \(.*\))$/\1/p'`
+
+ if echo $hgversion | grep + > /dev/null 2>&1 ; then
+ tmp=`echo $hgversion | cut -d+ -f 2`
+ hgversion=`echo $hgversion | cut -d+ -f 1`
+ distance=`echo $tmp | cut -d- -f 1`
+ node=`echo $tmp | cut -d- -f 2`
+ else
+ distance=''
+ node=''
+ fi
+ if echo $hgversion | grep -- '-' > /dev/null 2>&1; then
+ version=`echo $hgversion | cut -d- -f1`
+ type=`echo $hgversion | cut -d- -f2`
+ else
+ version=$hgversion
+ type=''
+ fi
+}
--- a/contrib/perf.py Wed Jun 06 13:28:49 2018 -0400
+++ b/contrib/perf.py Wed Jun 06 13:31:24 2018 -0400
@@ -71,6 +71,16 @@
import inspect
getargspec = inspect.getargspec
+try:
+ # 4.7+
+ queue = pycompat.queue.Queue
+except (AttributeError, ImportError):
+ # <4.7.
+ try:
+ queue = pycompat.queue
+ except (AttributeError, ImportError):
+ queue = util.queue
+
# for "historical portability":
# define util.safehasattr forcibly, because util.safehasattr has been
# available since 1.9.3 (or 94b200a11cf7)
@@ -1029,7 +1039,7 @@
else:
mdiff.textdiff(*pair)
else:
- q = util.queue()
+ q = queue()
for i in xrange(threads):
q.put(None)
ready = threading.Condition()
--- a/contrib/phabricator.py Wed Jun 06 13:28:49 2018 -0400
+++ b/contrib/phabricator.py Wed Jun 06 13:31:24 2018 -0400
@@ -21,10 +21,6 @@
# Phabricator URL
url = https://phab.example.com/
- # API token. Get it from https://$HOST/conduit/login/
- # Deprecated: see [phabricator.auth] below
- #token = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
-
# Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
# callsign is "FOO".
callsign = FOO
@@ -35,10 +31,12 @@
# the internal library.
curlcmd = curl --connect-timeout 2 --retry 3 --silent
- [phabricator.auth]
- example.url = https://phab.example.com/
+ [auth]
+ example.schemes = https
+ example.prefix = phab.example.com
+
# API token. Get it from https://$HOST/conduit/login/
- example.token = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
+ example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
"""
from __future__ import absolute_import
@@ -55,6 +53,7 @@
context,
encoding,
error,
+ httpconnection as httpconnectionmod,
mdiff,
obsutil,
parser,
@@ -74,6 +73,30 @@
cmdtable = {}
command = registrar.command(cmdtable)
+configtable = {}
+configitem = registrar.configitem(configtable)
+
+# developer config: phabricator.batchsize
+configitem('phabricator', 'batchsize',
+ default=12,
+)
+configitem('phabricator', 'callsign',
+ default=None,
+)
+configitem('phabricator', 'curlcmd',
+ default=None,
+)
+# developer config: phabricator.repophid
+configitem('phabricator', 'repophid',
+ default=None,
+)
+configitem('phabricator', 'url',
+ default=None,
+)
+configitem('phabsend', 'confirm',
+ default=False,
+)
+
colortable = {
'phabricator.action.created': 'green',
'phabricator.action.skipped': 'magenta',
@@ -106,30 +129,11 @@
printed_token_warning = False
-def readlegacytoken(repo):
+def readlegacytoken(repo, url):
"""Transitional support for old phabricator tokens.
- Remove before the 4.6 release.
+ Remove before the 4.7 release.
"""
- global printed_token_warning
- token = repo.ui.config('phabricator', 'token')
- if token and not printed_token_warning:
- printed_token_warning = True
- repo.ui.warn(_('phabricator.token is deprecated - please '
- 'migrate to the phabricator.auth section.\n'))
- return token
-
-def readurltoken(repo):
- """return conduit url, token and make sure they exist
-
- Currently read from [phabricator] config section. In the future, it might
- make sense to read from .arcconfig and .arcrc as well.
- """
- url = repo.ui.config('phabricator', 'url')
- if not url:
- raise error.Abort(_('config %s.%s is required')
- % ('phabricator', 'url'))
-
groups = {}
for key, val in repo.ui.configitems('phabricator.auth'):
if '.' not in key:
@@ -147,8 +151,37 @@
if token:
break
+ global printed_token_warning
+
+ if token and not printed_token_warning:
+ printed_token_warning = True
+ repo.ui.warn(_('phabricator.auth.token is deprecated - please '
+ 'migrate to auth.phabtoken.\n'))
+ return token
+
+def readurltoken(repo):
+ """return conduit url, token and make sure they exist
+
+ Currently read from [auth] config section. In the future, it might
+ make sense to read from .arcconfig and .arcrc as well.
+ """
+ url = repo.ui.config('phabricator', 'url')
+ if not url:
+ raise error.Abort(_('config %s.%s is required')
+ % ('phabricator', 'url'))
+
+ res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
+ token = None
+
+ if res:
+ group, auth = res
+
+ repo.ui.debug("using auth.%s.* for authentication\n" % group)
+
+ token = auth.get('phabtoken')
+
if not token:
- token = readlegacytoken(repo)
+ token = readlegacytoken(repo, url)
if not token:
raise error.Abort(_('Can\'t find conduit token associated to %s')
% (url,))
@@ -741,7 +774,7 @@
drevs, ancestordrevs = _prefetchdrevs(tree)
# developer config: phabricator.batchsize
- batchsize = repo.ui.configint('phabricator', 'batchsize', 12)
+ batchsize = repo.ui.configint('phabricator', 'batchsize')
# Prefetch Differential Revisions in batch
tofetch = set(drevs)
--- a/contrib/python3-whitelist Wed Jun 06 13:28:49 2018 -0400
+++ b/contrib/python3-whitelist Wed Jun 06 13:31:24 2018 -0400
@@ -36,10 +36,12 @@
test-cappedreader.py
test-casecollision.t
test-cat.t
+test-cbor.py
test-censor.t
test-changelog-exec.t
test-check-commit.t
test-check-execute.t
+test-check-interfaces.py
test-check-module-imports.t
test-check-pyflakes.t
test-check-pylint.t
@@ -61,10 +63,14 @@
test-config.t
test-conflict.t
test-confused-revert.t
+test-context.py
test-contrib-check-code.t
test-contrib-check-commit.t
test-convert-authormap.t
test-convert-clonebranches.t
+test-convert-cvs-branch.t
+test-convert-cvs-detectmerge.t
+test-convert-cvs.t
test-convert-datesort.t
test-convert-filemap.t
test-convert-hg-sink.t
@@ -99,6 +105,7 @@
test-dirstate-backup.t
test-dirstate-nonnormalset.t
test-dirstate.t
+test-dispatch.py
test-doctest.py
test-double-merge.t
test-drawdag.t
@@ -143,8 +150,12 @@
test-export.t
test-extdata.t
test-extdiff.t
+test-extensions-afterloaded.t
+test-extensions-wrapfunction.py
test-extra-filelog-entry.t
test-filebranch.t
+test-filecache.py
+test-filelog.py
test-fileset-generated.t
test-fix-topology.t
test-flags.t
@@ -158,10 +169,12 @@
test-hghave.t
test-hgignore.t
test-hgk.t
+test-hgrc.t
test-hgweb-bundle.t
test-hgweb-descend-empties.t
test-hgweb-empty.t
test-hgweb-removed.t
+test-hgwebdir-paths.py
test-hgwebdirsym.t
test-histedit-arguments.t
test-histedit-base.t
@@ -181,7 +194,9 @@
test-http-bundle1.t
test-http-clone-r.t
test-http.t
+test-hybridencode.py
test-identify.t
+test-import-merge.t
test-import-unknown.t
test-import.t
test-imports-checker.t
@@ -215,6 +230,7 @@
test-largefiles-update.t
test-largefiles.t
test-lfs-largefiles.t
+test-lfs-pointer.py
test-linerange.py
test-locate.t
test-lock-badness.t
@@ -254,6 +270,8 @@
test-merge7.t
test-merge8.t
test-merge9.t
+test-minifileset.py
+test-minirst.py
test-mq-git.t
test-mq-header-date.t
test-mq-header-from.t
@@ -308,9 +326,12 @@
test-obsolete-checkheads.t
test-obsolete-distributed.t
test-obsolete-tag-cache.t
+test-pager.t
test-parents.t
+test-parseindex2.py
test-pathconflicts-merge.t
test-pathconflicts-update.t
+test-pathencode.py
test-pending.t
test-permissions.t
test-phases.t
@@ -320,6 +341,7 @@
test-pull-pull-corruption.t
test-pull-r.t
test-pull-update.t
+test-pull.t
test-purge.t
test-push-checkheads-partial-C1.t
test-push-checkheads-partial-C2.t
@@ -350,6 +372,7 @@
test-push-checkheads-unpushed-D7.t
test-push-http.t
test-push-warn.t
+test-push.t
test-pushvars.t
test-rebase-abort.t
test-rebase-base-flag.t
@@ -378,6 +401,7 @@
test-rebase-scenario-global.t
test-rebase-templates.t
test-rebase-transaction.t
+test-rebuildstate.t
test-record.t
test-relink.t
test-remove.t
@@ -389,11 +413,13 @@
test-repo-compengines.t
test-resolve.t
test-revert-flags.t
+test-revert-interactive.t
test-revert-unknown.t
test-revlog-ancestry.py
test-revlog-group-emptyiter.t
test-revlog-mmapindex.t
test-revlog-packentry.t
+test-revlog-raw.py
test-revset-dirstate-parents.t
test-revset-legacy-lookup.t
test-revset-outgoing.t
@@ -409,34 +435,47 @@
test-show-work.t
test-show.t
test-simple-update.t
+test-simplekeyvaluefile.py
+test-simplemerge.py
test-single-head.t
test-sparse-clear.t
+test-sparse-clone.t
test-sparse-import.t
test-sparse-merges.t
test-sparse-profiles.t
test-sparse-requirement.t
test-sparse-verbose-json.t
+test-sparse.t
+test-split.t
test-ssh-clone-r.t
test-ssh-proto.t
test-sshserver.py
test-stack.t
+test-status-inprocess.py
test-status-rev.t
test-status-terse.t
test-strip-cross.t
test-strip.t
test-subrepo-deep-nested-change.t
test-subrepo-missing.t
+test-subrepo-paths.t
test-subrepo-recursion.t
test-subrepo-relative-path.t
test-subrepo.t
+test-symlink-os-yes-fs-no.py
+test-symlink-placeholder.t
test-symlinks.t
test-tag.t
test-tags.t
test-template-engine.t
test-treemanifest.t
+test-ui-color.py
+test-ui-config.py
+test-ui-verbosity.py
test-unamend.t
test-uncommit.t
test-unified-test.t
+test-unionrepo.t
test-unrelated-pull.t
test-up-local-change.t
test-update-branches.t
@@ -447,11 +486,16 @@
test-upgrade-repo.t
test-url-download.t
test-url-rev.t
+test-url.py
test-username-newline.t
test-verify.t
+test-walk.t
+test-walkrepo.py
test-websub.t
test-win32text.t
test-wireproto-clientreactor.py
test-wireproto-framing.py
test-wireproto-serverreactor.py
+test-wireproto.py
+test-wsgirequest.py
test-xdg.t
--- a/contrib/wix/help.wxs Wed Jun 06 13:28:49 2018 -0400
+++ b/contrib/wix/help.wxs Wed Jun 06 13:31:24 2018 -0400
@@ -19,6 +19,7 @@
<File Name="color.txt" />
<File Name="config.txt" KeyPath="yes" />
<File Name="dates.txt" />
+ <File Name="deprecated.txt" />
<File Name="diffs.txt" />
<File Name="environment.txt" />
<File Name="extensions.txt" />
--- a/hgdemandimport/__init__.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgdemandimport/__init__.py Wed Jun 06 13:31:24 2018 -0400
@@ -21,8 +21,9 @@
else:
from . import demandimportpy2 as demandimport
-# Extensions can add to this list if necessary.
-ignore = [
+# Full module names which can't be lazy imported.
+# Extensions can add to this set.
+IGNORES = {
'__future__',
'_hashlib',
# ImportError during pkg_resources/__init__.py:fixup_namespace_package
@@ -55,17 +56,15 @@
'__builtin__',
'builtins',
'urwid.command_map', # for pudb
- ]
+}
_pypy = '__pypy__' in sys.builtin_module_names
if _pypy:
- ignore.extend([
- # _ctypes.pointer is shadowed by "from ... import pointer" (PyPy 5)
- '_ctypes.pointer',
- ])
+ # _ctypes.pointer is shadowed by "from ... import pointer" (PyPy 5)
+ IGNORES.add('_ctypes.pointer')
-demandimport.init(ignore)
+demandimport.init(IGNORES)
# Re-export.
isenabled = demandimport.isenabled
--- a/hgdemandimport/demandimportpy2.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgdemandimport/demandimportpy2.py Wed Jun 06 13:31:24 2018 -0400
@@ -162,7 +162,7 @@
_pypy = '__pypy__' in sys.builtin_module_names
def _demandimport(name, globals=None, locals=None, fromlist=None, level=-1):
- if locals is None or name in ignore or fromlist == ('*',):
+ if locals is None or name in ignores or fromlist == ('*',):
# these cases we can't really delay
return _hgextimport(_origimport, name, globals, locals, fromlist, level)
elif not fromlist:
@@ -209,7 +209,7 @@
# while processing the import statement.
return
mn = '%s.%s' % (mod.__name__, attr)
- if mn in ignore:
+ if mn in ignores:
importfunc = _origimport
else:
importfunc = _demandmod
@@ -273,11 +273,11 @@
return mod
-ignore = []
+ignores = set()
-def init(ignorelist):
- global ignore
- ignore = ignorelist
+def init(ignoreset):
+ global ignores
+ ignores = ignoreset
def isenabled():
return builtins.__import__ == _demandimport
--- a/hgdemandimport/demandimportpy3.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgdemandimport/demandimportpy3.py Wed Jun 06 13:31:24 2018 -0400
@@ -40,7 +40,7 @@
"""
def exec_module(self, module):
"""Make the module load lazily."""
- if _deactivated or module.__name__ in ignore:
+ if _deactivated or module.__name__ in ignores:
self.loader.exec_module(module)
else:
super().exec_module(module)
@@ -62,11 +62,11 @@
(_bytecode_loader, importlib.machinery.BYTECODE_SUFFIXES),
)
-ignore = []
+ignores = set()
-def init(ignorelist):
- global ignore
- ignore = ignorelist
+def init(ignoreset):
+ global ignores
+ ignores = ignoreset
def isenabled():
return _makefinder in sys.path_hooks and not _deactivated
--- a/hgext/churn.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/churn.py Wed Jun 06 13:31:24 2018 -0400
@@ -52,7 +52,7 @@
def getkey(ctx):
t, tz = ctx.date()
date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
- return date.strftime(opts['dateformat'])
+ return date.strftime(encoding.strfromlocal(opts['dateformat']))
else:
tmpl = opts.get('oldtemplate') or opts.get('template')
tmpl = logcmdutil.maketemplater(ui, repo, tmpl)
@@ -161,7 +161,7 @@
if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
aliases = repo.wjoin('.hgchurn')
if aliases:
- for l in open(aliases, "r"):
+ for l in open(aliases, "rb"):
try:
alias, actual = l.rsplit('=' in l and '=' or None, 1)
amap[alias.strip()] = actual.strip()
--- a/hgext/convert/bzr.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/convert/bzr.py Wed Jun 06 13:31:24 2018 -0400
@@ -19,7 +19,7 @@
from . import common
# these do not work with demandimport, blacklist
-demandimport.ignore.extend([
+demandimport.IGNORES.update([
'bzrlib.transactions',
'bzrlib.urlutils',
'ElementPath',
--- a/hgext/convert/convcmd.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/convert/convcmd.py Wed Jun 06 13:31:24 2018 -0400
@@ -234,7 +234,7 @@
def walktree(self, heads):
'''Return a mapping that identifies the uncommitted parents of every
uncommitted changeset.'''
- visit = heads
+ visit = list(heads)
known = set()
parents = {}
numcommits = self.source.numcommits()
--- a/hgext/convert/cvsps.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/convert/cvsps.py Wed Jun 06 13:31:24 2018 -0400
@@ -6,6 +6,7 @@
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
+import functools
import os
import re
@@ -110,25 +111,25 @@
log = [] # list of logentry objects containing the CVS state
# patterns to match in CVS (r)log output, by state of use
- re_00 = re.compile('RCS file: (.+)$')
- re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
- re_02 = re.compile('cvs (r?log|server): (.+)\n$')
- re_03 = re.compile("(Cannot access.+CVSROOT)|"
- "(can't create temporary directory.+)$")
- re_10 = re.compile('Working file: (.+)$')
- re_20 = re.compile('symbolic names:')
- re_30 = re.compile('\t(.+): ([\\d.]+)$')
- re_31 = re.compile('----------------------------$')
- re_32 = re.compile('======================================='
- '======================================$')
- re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
- re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
- r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
- r'(\s+commitid:\s+([^;]+);)?'
- r'(.*mergepoint:\s+([^;]+);)?')
- re_70 = re.compile('branches: (.+);$')
+ re_00 = re.compile(b'RCS file: (.+)$')
+ re_01 = re.compile(b'cvs \\[r?log aborted\\]: (.+)$')
+ re_02 = re.compile(b'cvs (r?log|server): (.+)\n$')
+ re_03 = re.compile(b"(Cannot access.+CVSROOT)|"
+ b"(can't create temporary directory.+)$")
+ re_10 = re.compile(b'Working file: (.+)$')
+ re_20 = re.compile(b'symbolic names:')
+ re_30 = re.compile(b'\t(.+): ([\\d.]+)$')
+ re_31 = re.compile(b'----------------------------$')
+ re_32 = re.compile(b'======================================='
+ b'======================================$')
+ re_50 = re.compile(b'revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
+ re_60 = re.compile(br'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
+ br'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
+ br'(\s+commitid:\s+([^;]+);)?'
+ br'(.*mergepoint:\s+([^;]+);)?')
+ re_70 = re.compile(b'branches: (.+);$')
- file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')
+ file_added_re = re.compile(br'file [^/]+ was (initially )?added on branch')
prefix = '' # leading path to strip of what we get from CVS
@@ -509,7 +510,8 @@
comment = entry.comment
for e in encodings:
try:
- entry.comment = comment.decode(e).encode('utf-8')
+ entry.comment = comment.decode(
+ pycompat.sysstr(e)).encode('utf-8')
if ui.debugflag:
ui.debug("transcoding by %s: %s of %s\n" %
(e, revstr(entry.revision), entry.file))
@@ -653,7 +655,7 @@
return 0
for c in changesets:
- c.entries.sort(entitycompare)
+ c.entries.sort(key=functools.cmp_to_key(entitycompare))
# Sort changesets by date
@@ -706,7 +708,7 @@
d = c(len(l.branchpoints), len(r.branchpoints))
return d
- changesets.sort(cscmp)
+ changesets.sort(key=functools.cmp_to_key(cscmp))
# Collect tags
@@ -729,12 +731,12 @@
# {{mergefrombranch BRANCHNAME}} by setting two parents.
if mergeto is None:
- mergeto = r'{{mergetobranch ([-\w]+)}}'
+ mergeto = br'{{mergetobranch ([-\w]+)}}'
if mergeto:
mergeto = re.compile(mergeto)
if mergefrom is None:
- mergefrom = r'{{mergefrombranch ([-\w]+)}}'
+ mergefrom = br'{{mergefrombranch ([-\w]+)}}'
if mergefrom:
mergefrom = re.compile(mergefrom)
@@ -797,7 +799,7 @@
except KeyError:
ui.warn(_("warning: CVS commit message references "
"non-existent branch %r:\n%s\n")
- % (m, c.comment))
+ % (pycompat.bytestr(m), c.comment))
if m in branches and c.branch != m and not candidate.synthetic:
c.parents.append(candidate)
@@ -940,7 +942,8 @@
if fn.startswith(opts["prefix"]):
fn = fn[len(opts["prefix"]):]
ui.write('\t%s:%s->%s%s \n' % (
- fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
+ fn,
+ '.'.join([b"%d" % x for x in f.parent]) or 'INITIAL',
'.'.join([(b"%d" % x) for x in f.revision]),
['', '(DEAD)'][f.dead]))
ui.write('\n')
--- a/hgext/convert/darcs.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/convert/darcs.py Wed Jun 06 13:31:24 2018 -0400
@@ -10,10 +10,11 @@
import os
import re
import shutil
-import tempfile
+
from mercurial.i18n import _
from mercurial import (
error,
+ pycompat,
util,
)
from mercurial.utils import dateutil
@@ -76,7 +77,7 @@
self.ui.warn(_('failed to detect repository format!'))
def before(self):
- self.tmppath = tempfile.mkdtemp(
+ self.tmppath = pycompat.mkdtemp(
prefix='convert-' + os.path.basename(self.path) + '-')
output, status = self.run('init', repodir=self.tmppath)
self.checkexit(status)
--- a/hgext/convert/subversion.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/convert/subversion.py Wed Jun 06 13:31:24 2018 -0400
@@ -5,7 +5,6 @@
import os
import re
-import tempfile
import xml.dom.minidom
from mercurial.i18n import _
@@ -1081,7 +1080,7 @@
' hg executable is in PATH'))
return logstream(stdout)
-pre_revprop_change = '''#!/bin/sh
+pre_revprop_change = b'''#!/bin/sh
REPOS="$1"
REV="$2"
@@ -1098,8 +1097,8 @@
'''
class svn_sink(converter_sink, commandline):
- commit_re = re.compile(r'Committed revision (\d+).', re.M)
- uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
+ commit_re = re.compile(br'Committed revision (\d+).', re.M)
+ uuid_re = re.compile(br'Repository UUID:\s*(\S+)', re.M)
def prerun(self):
if self.wc:
@@ -1225,7 +1224,7 @@
wdest = self.wjoin(dest)
exists = os.path.lexists(wdest)
if exists:
- fd, tempname = tempfile.mkstemp(
+ fd, tempname = pycompat.mkstemp(
prefix='hg-copy-', dir=os.path.dirname(wdest))
os.close(fd)
os.unlink(tempname)
@@ -1313,7 +1312,7 @@
self.xargs(self.setexec, 'propset', 'svn:executable', '*')
self.setexec = []
- fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
+ fd, messagefile = pycompat.mkstemp(prefix='hg-convert-')
fp = os.fdopen(fd, r'wb')
fp.write(util.tonativeeol(commit.desc))
fp.close()
--- a/hgext/extdiff.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/extdiff.py Wed Jun 06 13:31:24 2018 -0400
@@ -71,7 +71,7 @@
import re
import shutil
import stat
-import tempfile
+
from mercurial.i18n import _
from mercurial.node import (
nullid,
@@ -210,7 +210,7 @@
if not common:
return 0
- tmproot = tempfile.mkdtemp(prefix='extdiff.')
+ tmproot = pycompat.mkdtemp(prefix='extdiff.')
try:
if not opts.get('patch'):
# Always make a copy of node1a (and node1b, if applicable)
--- a/hgext/githelp.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/githelp.py Wed Jun 06 13:31:24 2018 -0400
@@ -67,7 +67,7 @@
cmd = args[0]
if not cmd in gitcommands:
- raise error.Abort("error: unknown git command %s" % (cmd))
+ raise error.Abort(_("error: unknown git command %s") % (cmd))
ui.pager('githelp')
args = args[1:]
@@ -90,14 +90,13 @@
elif ('-' + ex.opt) in ex.msg:
flag = '-' + ex.opt
else:
- raise error.Abort("unknown option %s" % ex.opt)
+ raise error.Abort(_("unknown option %s") % ex.opt)
try:
args.remove(flag)
except Exception:
- raise error.Abort(
- "unknown option {0} packed with other options\n"
- "Please try passing the option as it's own flag: -{0}" \
- .format(ex.opt))
+ msg = _("unknown option '%s' packed with other options")
+ hint = _("please try passing the option as its own flag: -%s")
+ raise error.Abort(msg % ex.opt, hint=hint % ex.opt)
ui.warn(_("ignoring unknown option %s\n") % flag)
@@ -171,7 +170,7 @@
cmd.extend(args)
else:
ui.status(_("note: use hg addremove to remove files that have "
- "been deleted.\n\n"))
+ "been deleted\n\n"))
ui.status((bytes(cmd)), "\n")
@@ -196,7 +195,7 @@
ui.status((bytes(cmd)), "\n")
def bisect(ui, repo, *args, **kwargs):
- ui.status(_("See 'hg help bisect' for how to use bisect.\n\n"))
+ ui.status(_("see 'hg help bisect' for how to use bisect\n\n"))
def blame(ui, repo, *args, **kwargs):
cmdoptions = [
@@ -236,6 +235,8 @@
# shell command to output the active bookmark for the active
# revision
old = '`hg log -T"{activebookmark}" -r .`'
+ else:
+ raise error.Abort(_('missing newbranch argument'))
new = args[0]
cmd['-m'] = old
cmd.append(new)
@@ -334,7 +335,7 @@
cmd = Command('revert')
cmd['--all'] = None
else:
- raise error.Abort("a commit must be specified")
+ raise error.Abort(_("a commit must be specified"))
ui.status((bytes(cmd)), "\n")
@@ -353,7 +354,7 @@
if opts.get('continue'):
cmd['--continue'] = None
elif opts.get('abort'):
- ui.status(_("note: hg graft does not have --abort.\n\n"))
+ ui.status(_("note: hg graft does not have --abort\n\n"))
return
else:
cmd.extend(args)
@@ -384,7 +385,7 @@
args, opts = parseoptions(ui, cmdoptions, args)
if len(args) == 0:
- raise error.Abort("a repository to clone must be specified")
+ raise error.Abort(_("a repository to clone must be specified"))
cmd = Command('clone')
cmd.append(args[0])
@@ -393,8 +394,8 @@
if opts.get('bare'):
cmd['-U'] = None
- ui.status(_("note: Mercurial does not have bare clones. " +
- "-U will clone the repo without checking out a commit\n\n"))
+ ui.status(_("note: Mercurial does not have bare clones. "
+ "-U will clone the repo without checking out a commit\n\n"))
elif opts.get('no_checkout'):
cmd['-U'] = None
@@ -436,9 +437,9 @@
cmd['-m'] = "'%s'" % (opts.get('message'),)
if opts.get('all'):
- ui.status(_("note: Mercurial doesn't have a staging area, " +
- "so there is no --all. -A will add and remove files " +
- "for you though.\n\n"))
+ ui.status(_("note: Mercurial doesn't have a staging area, "
+ "so there is no --all. -A will add and remove files "
+ "for you though.\n\n"))
if opts.get('file'):
cmd['-l'] = opts.get('file')
@@ -454,8 +455,8 @@
ui.status((bytes(cmd)), "\n")
def deprecated(ui, repo, *args, **kwargs):
- ui.warn(_('This command has been deprecated in the git project, ' +
- 'thus isn\'t supported by this tool.\n\n'))
+ ui.warn(_('this command has been deprecated in the git project, '
+ 'thus isn\'t supported by this tool\n\n'))
def diff(ui, repo, *args, **kwargs):
cmdoptions = [
@@ -468,8 +469,8 @@
cmd = Command('diff')
if opts.get('cached'):
- ui.status(_('note: Mercurial has no concept of a staging area, ' +
- 'so --cached does nothing.\n\n'))
+ ui.status(_('note: Mercurial has no concept of a staging area, '
+ 'so --cached does nothing\n\n'))
if opts.get('reverse'):
cmd['--reverse'] = None
@@ -505,10 +506,10 @@
if len(args) > 0:
cmd.append(args[0])
if len(args) > 1:
- ui.status(_("note: Mercurial doesn't have refspecs. " +
- "-r can be used to specify which commits you want to pull. " +
- "-B can be used to specify which bookmark you want to pull." +
- "\n\n"))
+ ui.status(_("note: Mercurial doesn't have refspecs. "
+ "-r can be used to specify which commits you want to "
+ "pull. -B can be used to specify which bookmark you "
+ "want to pull.\n\n"))
for v in args[1:]:
if v in repo._bookmarks:
cmd['-B'] = v
@@ -556,10 +557,10 @@
('p', 'patch', None, ''),
]
args, opts = parseoptions(ui, cmdoptions, args)
- ui.status(_('note: -v prints the entire commit message like Git does. To ' +
- 'print just the first line, drop the -v.\n\n'))
- ui.status(_("note: see hg help revset for information on how to filter " +
- "log output.\n\n"))
+ ui.status(_('note: -v prints the entire commit message like Git does. To '
+ 'print just the first line, drop the -v.\n\n'))
+ ui.status(_("note: see hg help revset for information on how to filter "
+ "log output\n\n"))
cmd = Command('log')
cmd['-v'] = None
@@ -578,13 +579,13 @@
if opts.get('pretty') or opts.get('format') or opts.get('oneline'):
format = opts.get('format', '')
if 'format:' in format:
- ui.status(_("note: --format format:??? equates to Mercurial's " +
- "--template. See hg help templates for more info.\n\n"))
+ ui.status(_("note: --format format:??? equates to Mercurial's "
+ "--template. See hg help templates for more info.\n\n"))
cmd['--template'] = '???'
else:
- ui.status(_("note: --pretty/format/oneline equate to Mercurial's " +
- "--style or --template. See hg help templates for more info." +
- "\n\n"))
+ ui.status(_("note: --pretty/format/oneline equate to Mercurial's "
+ "--style or --template. See hg help templates for "
+ "more info.\n\n"))
cmd['--style'] = '???'
if len(args) > 0:
@@ -654,8 +655,8 @@
cmd = Command("log -T '{node}\\n' -r 'ancestor(%s,%s)'"
% (args[0], args[1]))
- ui.status(_('NOTE: ancestors() is part of the revset language.\n'),
- _("Learn more about revsets with 'hg help revsets'\n\n"))
+ ui.status(_('note: ancestors() is part of the revset language\n'),
+ _("(learn more about revsets with 'hg help revsets')\n\n"))
ui.status((bytes(cmd)), "\n")
def mergetool(ui, repo, *args, **kwargs):
@@ -697,10 +698,10 @@
if len(args) > 0:
cmd.append(args[0])
if len(args) > 1:
- ui.status(_("note: Mercurial doesn't have refspecs. " +
- "-r can be used to specify which commits you want to pull. " +
- "-B can be used to specify which bookmark you want to pull." +
- "\n\n"))
+ ui.status(_("note: Mercurial doesn't have refspecs. "
+ "-r can be used to specify which commits you want to "
+ "pull. -B can be used to specify which bookmark you "
+ "want to pull.\n\n"))
for v in args[1:]:
if v in repo._bookmarks:
cmd['-B'] = v
@@ -721,10 +722,10 @@
if len(args) > 0:
cmd.append(args[0])
if len(args) > 1:
- ui.status(_("note: Mercurial doesn't have refspecs. " +
- "-r can be used to specify which commits you want to push. " +
- "-B can be used to specify which bookmark you want to push." +
- "\n\n"))
+ ui.status(_("note: Mercurial doesn't have refspecs. "
+ "-r can be used to specify which commits you want "
+ "to push. -B can be used to specify which bookmark "
+ "you want to push.\n\n"))
for v in args[1:]:
if v in repo._bookmarks:
cmd['-B'] = v
@@ -748,12 +749,12 @@
args, opts = parseoptions(ui, cmdoptions, args)
if opts.get('interactive'):
- ui.status(_("note: hg histedit does not perform a rebase. " +
- "It just edits history.\n\n"))
+ ui.status(_("note: hg histedit does not perform a rebase. "
+ "It just edits history.\n\n"))
cmd = Command('histedit')
if len(args) > 0:
ui.status(_("also note: 'hg histedit' will automatically detect"
- " your stack, so no second argument is necessary.\n\n"))
+ " your stack, so no second argument is necessary\n\n"))
ui.status((bytes(cmd)), "\n")
return
@@ -769,12 +770,12 @@
cmd['--abort'] = None
if opts.get('onto'):
- ui.status(_("note: if you're trying to lift a commit off one branch, " +
- "try hg rebase -d <destination commit> -s <commit to be lifted>" +
- "\n\n"))
+ ui.status(_("note: if you're trying to lift a commit off one branch, "
+ "try hg rebase -d <destination commit> -s <commit to be "
+ "lifted>\n\n"))
cmd['-d'] = convert(opts.get('onto'))
if len(args) < 2:
- raise error.Abort("Expected format: git rebase --onto X Y Z")
+ raise error.Abort(_("expected format: git rebase --onto X Y Z"))
cmd['-s'] = "'::%s - ::%s'" % (convert(args[1]), convert(args[0]))
else:
if len(args) == 1:
@@ -799,7 +800,7 @@
ui.status(bytes(cmd), "\n\n")
ui.status(_("note: in hg commits can be deleted from repo but we always"
- " have backups.\n"))
+ " have backups\n"))
def reset(ui, repo, *args, **kwargs):
cmdoptions = [
@@ -813,10 +814,10 @@
hard = opts.get('hard')
if opts.get('mixed'):
- ui.status(_('NOTE: --mixed has no meaning since Mercurial has no '
+ ui.status(_('note: --mixed has no meaning since Mercurial has no '
'staging area\n\n'))
if opts.get('soft'):
- ui.status(_('NOTE: --soft has no meaning since Mercurial has no '
+ ui.status(_('note: --soft has no meaning since Mercurial has no '
'staging area\n\n'))
cmd = Command('update')
@@ -833,7 +834,7 @@
args, opts = parseoptions(ui, cmdoptions, args)
if len(args) > 1:
- ui.status(_("note: hg backout doesn't support multiple commits at " +
+ ui.status(_("note: hg backout doesn't support multiple commits at "
"once\n\n"))
cmd = Command('backout')
@@ -930,8 +931,8 @@
cmd['--keep'] = None
elif (action == 'branch' or action == 'show' or action == 'clear'
or action == 'create'):
- ui.status(_("note: Mercurial doesn't have equivalents to the " +
- "git stash branch, show, clear, or create actions.\n\n"))
+ ui.status(_("note: Mercurial doesn't have equivalents to the "
+ "git stash branch, show, clear, or create actions\n\n"))
return
else:
if len(args) > 0:
@@ -957,9 +958,11 @@
ui.status((bytes(cmd)), "\n")
def svn(ui, repo, *args, **kwargs):
+ if not args:
+ raise error.Abort(_('missing svn command'))
svncmd = args[0]
- if not svncmd in gitsvncommands:
- ui.warn(_("error: unknown git svn command %s\n") % (svncmd))
+ if svncmd not in gitsvncommands:
+ raise error.Abort(_('unknown git svn command "%s"') % (svncmd))
args = args[1:]
return gitsvncommands[svncmd](ui, repo, *args, **kwargs)
@@ -988,6 +991,9 @@
]
args, opts = parseoptions(ui, cmdoptions, args)
+ if not args:
+ raise error.Abort(_('missing find-rev argument'))
+
cmd = Command('log')
cmd['-r'] = args[0]
@@ -1020,6 +1026,10 @@
cmd = Command('tags')
else:
cmd = Command('tag')
+
+ if not args:
+ raise error.Abort(_('missing tag argument'))
+
cmd.append(args[0])
if len(args) > 1:
cmd['-r'] = args[1]
--- a/hgext/gpg.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/gpg.py Wed Jun 06 13:31:24 2018 -0400
@@ -9,7 +9,6 @@
import binascii
import os
-import tempfile
from mercurial.i18n import _
from mercurial import (
@@ -61,11 +60,11 @@
sigfile = datafile = None
try:
# create temporary files
- fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
+ fd, sigfile = pycompat.mkstemp(prefix="hg-gpg-", suffix=".sig")
fp = os.fdopen(fd, r'wb')
fp.write(sig)
fp.close()
- fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
+ fd, datafile = pycompat.mkstemp(prefix="hg-gpg-", suffix=".txt")
fp = os.fdopen(fd, r'wb')
fp.write(data)
fp.close()
--- a/hgext/highlight/highlight.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/highlight/highlight.py Wed Jun 06 13:31:24 2018 -0400
@@ -11,7 +11,7 @@
from __future__ import absolute_import
from mercurial import demandimport
-demandimport.ignore.extend(['pkgutil', 'pkg_resources', '__main__'])
+demandimport.IGNORES.update(['pkgutil', 'pkg_resources', '__main__'])
from mercurial import (
encoding,
--- a/hgext/infinitepush/__init__.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/infinitepush/__init__.py Wed Jun 06 13:31:24 2018 -0400
@@ -94,7 +94,6 @@
import re
import socket
import subprocess
-import tempfile
import time
from mercurial.node import (
@@ -565,19 +564,19 @@
if isinstance(localkey, str) and _scratchbranchmatcher(localkey):
scratchnode = repo.bundlestore.index.getnode(localkey)
if scratchnode:
- return "%s %s\n" % (1, scratchnode)
+ return "%d %s\n" % (1, scratchnode)
else:
- return "%s %s\n" % (0, 'scratch branch %s not found' % localkey)
+ return "%d %s\n" % (0, 'scratch branch %s not found' % localkey)
else:
try:
r = hex(repo.lookup(localkey))
- return "%s %s\n" % (1, r)
+ return "%d %s\n" % (1, r)
except Exception as inst:
if repo.bundlestore.index.getbundle(localkey):
- return "%s %s\n" % (1, localkey)
+ return "%d %s\n" % (1, localkey)
else:
- r = str(inst)
- return "%s %s\n" % (0, r)
+ r = stringutil.forcebytestr(inst)
+ return "%d %s\n" % (0, r)
return _lookup
def _pull(orig, ui, repo, source="default", **opts):
@@ -912,7 +911,7 @@
# storing the bundle in the bundlestore
buf = util.chunkbuffer(bundler.getchunks())
- fd, bundlefile = tempfile.mkstemp()
+ fd, bundlefile = pycompat.mkstemp()
try:
try:
fp = os.fdopen(fd, r'wb')
@@ -998,7 +997,7 @@
# If commits were sent, store them
if cgparams:
buf = util.chunkbuffer(bundler.getchunks())
- fd, bundlefile = tempfile.mkstemp()
+ fd, bundlefile = pycompat.mkstemp()
try:
try:
fp = os.fdopen(fd, r'wb')
@@ -1110,7 +1109,7 @@
bundler.addpart(cgpart)
buf = util.chunkbuffer(bundler.getchunks())
- fd, bundlefile = tempfile.mkstemp()
+ fd, bundlefile = pycompat.mkstemp()
try:
try:
fp = os.fdopen(fd, r'wb')
--- a/hgext/infinitepush/common.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/infinitepush/common.py Wed Jun 06 13:31:24 2018 -0400
@@ -6,13 +6,13 @@
from __future__ import absolute_import
import os
-import tempfile
from mercurial.node import hex
from mercurial import (
error,
extensions,
+ pycompat,
)
def isremotebooksenabled(ui):
@@ -30,7 +30,7 @@
def _makebundlefromraw(data):
fp = None
- fd, bundlefile = tempfile.mkstemp()
+ fd, bundlefile = pycompat.mkstemp()
try: # guards bundlefile
try: # guards fp
fp = os.fdopen(fd, 'wb')
--- a/hgext/infinitepush/store.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/infinitepush/store.py Wed Jun 06 13:31:24 2018 -0400
@@ -120,6 +120,8 @@
def write(self, data):
# Won't work on windows because you can't open file second time without
# closing it
+ # TODO: rewrite without str.format() and replace NamedTemporaryFile()
+ # with pycompat.namedtempfile()
with NamedTemporaryFile() as temp:
temp.write(data)
temp.flush()
@@ -142,6 +144,8 @@
def read(self, handle):
# Won't work on windows because you can't open file second time without
# closing it
+ # TODO: rewrite without str.format() and replace NamedTemporaryFile()
+ # with pycompat.namedtempfile()
with NamedTemporaryFile() as temp:
formatted_args = [arg.format(filename=temp.name, handle=handle)
for arg in self.get_args]
--- a/hgext/keyword.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/keyword.py Wed Jun 06 13:31:24 2018 -0400
@@ -87,7 +87,6 @@
import os
import re
-import tempfile
import weakref
from mercurial.i18n import _
@@ -434,7 +433,7 @@
ui.write('%s = %s\n' % (k, v))
fn = 'demo.txt'
- tmpdir = tempfile.mkdtemp('', 'kwdemo.')
+ tmpdir = pycompat.mkdtemp('', 'kwdemo.')
ui.note(_('creating temporary repository at %s\n') % tmpdir)
if repo is None:
baseui = ui
--- a/hgext/lfs/pointer.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/lfs/pointer.py Wed Jun 06 13:31:24 2018 -0400
@@ -15,6 +15,9 @@
error,
pycompat,
)
+from mercurial.utils import (
+ stringutil,
+)
class InvalidPointer(error.RevlogError):
pass
@@ -32,7 +35,8 @@
try:
return cls(l.split(' ', 1) for l in text.splitlines()).validate()
except ValueError: # l.split returns 1 item instead of 2
- raise InvalidPointer(_('cannot parse git-lfs text: %r') % text)
+ raise InvalidPointer(_('cannot parse git-lfs text: %s')
+ % stringutil.pprint(text))
def serialize(self):
sortkeyfunc = lambda x: (x[0] != 'version', x)
@@ -61,15 +65,19 @@
for k, v in self.iteritems():
if k in self._requiredre:
if not self._requiredre[k].match(v):
- raise InvalidPointer(_('unexpected value: %s=%r') % (k, v))
+ raise InvalidPointer(
+ _('unexpected lfs pointer value: %s=%s')
+ % (k, stringutil.pprint(v)))
requiredcount += 1
elif not self._keyre.match(k):
- raise InvalidPointer(_('unexpected key: %s') % k)
+ raise InvalidPointer(_('unexpected lfs pointer key: %s') % k)
if not self._valuere.match(v):
- raise InvalidPointer(_('unexpected value: %s=%r') % (k, v))
+ raise InvalidPointer(_('unexpected lfs pointer value: %s=%s')
+ % (k, stringutil.pprint(v)))
if len(self._requiredre) != requiredcount:
miss = sorted(set(self._requiredre.keys()).difference(self.keys()))
- raise InvalidPointer(_('missed keys: %s') % ', '.join(miss))
+ raise InvalidPointer(_('missing lfs pointer keys: %s')
+ % ', '.join(miss))
return self
deserialize = gitlfspointer.deserialize
--- a/hgext/mq.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/mq.py Wed Jun 06 13:31:24 2018 -0400
@@ -492,7 +492,8 @@
n, name = entry
yield statusentry(bin(n), name)
elif l.strip():
- self.ui.warn(_('malformated mq status line: %s\n') % entry)
+ self.ui.warn(_('malformated mq status line: %s\n') %
+ stringutil.pprint(entry))
# else we ignore empty lines
try:
lines = self.opener.read(self.statuspath).splitlines()
@@ -2872,7 +2873,7 @@
patch = None
args = list(args)
if opts.get(r'list'):
- if args or opts.get('none'):
+ if args or opts.get(r'none'):
raise error.Abort(_('cannot mix -l/--list with options or '
'arguments'))
for i in xrange(len(q.series)):
@@ -2886,7 +2887,7 @@
patch = args.pop(0)
if patch is None:
raise error.Abort(_('no patch to work with'))
- if args or opts.get('none'):
+ if args or opts.get(r'none'):
idx = q.findseries(patch)
if idx is None:
raise error.Abort(_('no patch named %s') % patch)
--- a/hgext/narrow/__init__.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/narrow/__init__.py Wed Jun 06 13:31:24 2018 -0400
@@ -28,8 +28,6 @@
narrowchangegroup,
narrowcommands,
narrowcopies,
- narrowdirstate,
- narrowmerge,
narrowpatch,
narrowrepo,
narrowrevlog,
@@ -64,7 +62,6 @@
localrepo.featuresetupfuncs.add(featuresetup)
narrowrevlog.setup()
narrowbundle2.setup()
- narrowmerge.setup()
narrowcommands.setup()
narrowchangegroup.setup()
narrowwirepeer.uisetup()
@@ -74,10 +71,9 @@
if not repo.local():
return
- narrowrepo.wraprepo(repo)
if changegroup.NARROW_REQUIREMENT in repo.requirements:
+ narrowrepo.wraprepo(repo)
narrowcopies.setup(repo)
- narrowdirstate.setup(repo)
narrowpatch.setup(repo)
narrowwirepeer.reposetup(repo)
--- a/hgext/narrow/narrowdirstate.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/narrow/narrowdirstate.py Wed Jun 06 13:31:24 2018 -0400
@@ -9,74 +9,91 @@
from mercurial.i18n import _
from mercurial import (
- dirstate,
error,
- extensions,
match as matchmod,
narrowspec,
util as hgutil,
)
-def setup(repo):
+def wrapdirstate(repo, dirstate):
"""Add narrow spec dirstate ignore, block changes outside narrow spec."""
- def walk(orig, self, match, subrepos, unknown, ignored, full=True,
- narrowonly=True):
- if narrowonly:
- # hack to not exclude explicitly-specified paths so that they can
- # be warned later on e.g. dirstate.add()
- em = matchmod.exact(match._root, match._cwd, match.files())
- nm = matchmod.unionmatcher([repo.narrowmatch(), em])
- match = matchmod.intersectmatchers(match, nm)
- return orig(self, match, subrepos, unknown, ignored, full)
-
- extensions.wrapfunction(dirstate.dirstate, 'walk', walk)
-
- # Prevent adding files that are outside the sparse checkout
- editfuncs = ['normal', 'add', 'normallookup', 'copy', 'remove', 'merge']
- for func in editfuncs:
- def _wrapper(orig, self, *args):
+ def _editfunc(fn):
+ def _wrapper(self, *args):
dirstate = repo.dirstate
narrowmatch = repo.narrowmatch()
for f in args:
if f is not None and not narrowmatch(f) and f not in dirstate:
raise error.Abort(_("cannot track '%s' - it is outside " +
"the narrow clone") % f)
- return orig(self, *args)
- extensions.wrapfunction(dirstate.dirstate, func, _wrapper)
-
- def filterrebuild(orig, self, parent, allfiles, changedfiles=None):
- if changedfiles is None:
- # Rebuilding entire dirstate, let's filter allfiles to match the
- # narrowspec.
- allfiles = [f for f in allfiles if repo.narrowmatch()(f)]
- orig(self, parent, allfiles, changedfiles)
-
- extensions.wrapfunction(dirstate.dirstate, 'rebuild', filterrebuild)
+ return fn(self, *args)
+ return _wrapper
def _narrowbackupname(backupname):
assert 'dirstate' in backupname
return backupname.replace('dirstate', narrowspec.FILENAME)
- def restorebackup(orig, self, tr, backupname):
- self._opener.rename(_narrowbackupname(backupname), narrowspec.FILENAME,
- checkambig=True)
- orig(self, tr, backupname)
+ class narrowdirstate(dirstate.__class__):
+ def walk(self, match, subrepos, unknown, ignored, full=True,
+ narrowonly=True):
+ if narrowonly:
+ # hack to not exclude explicitly-specified paths so that they
+ # can be warned later on e.g. dirstate.add()
+ em = matchmod.exact(match._root, match._cwd, match.files())
+ nm = matchmod.unionmatcher([repo.narrowmatch(), em])
+ match = matchmod.intersectmatchers(match, nm)
+ return super(narrowdirstate, self).walk(match, subrepos, unknown,
+ ignored, full)
- extensions.wrapfunction(dirstate.dirstate, 'restorebackup', restorebackup)
+ # Prevent adding/editing/copying/deleting files that are outside the
+ # sparse checkout
+ @_editfunc
+ def normal(self, *args):
+ return super(narrowdirstate, self).normal(*args)
- def savebackup(orig, self, tr, backupname):
- orig(self, tr, backupname)
+ @_editfunc
+ def add(self, *args):
+ return super(narrowdirstate, self).add(*args)
+
+ @_editfunc
+ def normallookup(self, *args):
+ return super(narrowdirstate, self).normallookup(*args)
+
+ @_editfunc
+ def copy(self, *args):
+ return super(narrowdirstate, self).copy(*args)
- narrowbackupname = _narrowbackupname(backupname)
- self._opener.tryunlink(narrowbackupname)
- hgutil.copyfile(self._opener.join(narrowspec.FILENAME),
- self._opener.join(narrowbackupname), hardlink=True)
+ @_editfunc
+ def remove(self, *args):
+ return super(narrowdirstate, self).remove(*args)
+
+ @_editfunc
+ def merge(self, *args):
+ return super(narrowdirstate, self).merge(*args)
+
+ def rebuild(self, parent, allfiles, changedfiles=None):
+ if changedfiles is None:
+ # Rebuilding entire dirstate, let's filter allfiles to match the
+ # narrowspec.
+ allfiles = [f for f in allfiles if repo.narrowmatch()(f)]
+ super(narrowdirstate, self).rebuild(parent, allfiles, changedfiles)
- extensions.wrapfunction(dirstate.dirstate, 'savebackup', savebackup)
+ def restorebackup(self, tr, backupname):
+ self._opener.rename(_narrowbackupname(backupname),
+ narrowspec.FILENAME, checkambig=True)
+ super(narrowdirstate, self).restorebackup(tr, backupname)
+
+ def savebackup(self, tr, backupname):
+ super(narrowdirstate, self).savebackup(tr, backupname)
- def clearbackup(orig, self, tr, backupname):
- orig(self, tr, backupname)
- self._opener.unlink(_narrowbackupname(backupname))
+ narrowbackupname = _narrowbackupname(backupname)
+ self._opener.tryunlink(narrowbackupname)
+ hgutil.copyfile(self._opener.join(narrowspec.FILENAME),
+ self._opener.join(narrowbackupname), hardlink=True)
- extensions.wrapfunction(dirstate.dirstate, 'clearbackup', clearbackup)
+ def clearbackup(self, tr, backupname):
+ super(narrowdirstate, self).clearbackup(tr, backupname)
+ self._opener.unlink(_narrowbackupname(backupname))
+
+ dirstate.__class__ = narrowdirstate
+ return dirstate
--- a/hgext/narrow/narrowmerge.py Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,77 +0,0 @@
-# narrowmerge.py - extensions to mercurial merge module to support narrow clones
-#
-# Copyright 2017 Google, Inc.
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-from __future__ import absolute_import
-
-from mercurial.i18n import _
-from mercurial import (
- copies,
- error,
- extensions,
- merge,
-)
-
-def setup():
- def _manifestmerge(orig, repo, wctx, p2, pa, branchmerge, *args, **kwargs):
- """Filter updates to only lay out files that match the narrow spec."""
- actions, diverge, renamedelete = orig(
- repo, wctx, p2, pa, branchmerge, *args, **kwargs)
-
- narrowmatch = repo.narrowmatch()
- if narrowmatch.always():
- return actions, diverge, renamedelete
-
- nooptypes = set(['k']) # TODO: handle with nonconflicttypes
- nonconflicttypes = set('a am c cm f g r e'.split())
- # We mutate the items in the dict during iteration, so iterate
- # over a copy.
- for f, action in list(actions.items()):
- if narrowmatch(f):
- pass
- elif not branchmerge:
- del actions[f] # just updating, ignore changes outside clone
- elif action[0] in nooptypes:
- del actions[f] # merge does not affect file
- elif action[0] in nonconflicttypes:
- raise error.Abort(_('merge affects file \'%s\' outside narrow, '
- 'which is not yet supported') % f,
- hint=_('merging in the other direction '
- 'may work'))
- else:
- raise error.Abort(_('conflict in file \'%s\' is outside '
- 'narrow clone') % f)
-
- return actions, diverge, renamedelete
-
- extensions.wrapfunction(merge, 'manifestmerge', _manifestmerge)
-
- def _checkcollision(orig, repo, wmf, actions):
- narrowmatch = repo.narrowmatch()
- if not narrowmatch.always():
- wmf = wmf.matches(narrowmatch)
- if actions:
- narrowactions = {}
- for m, actionsfortype in actions.iteritems():
- narrowactions[m] = []
- for (f, args, msg) in actionsfortype:
- if narrowmatch(f):
- narrowactions[m].append((f, args, msg))
- actions = narrowactions
- return orig(repo, wmf, actions)
-
- extensions.wrapfunction(merge, '_checkcollision', _checkcollision)
-
- def _computenonoverlap(orig, repo, *args, **kwargs):
- u1, u2 = orig(repo, *args, **kwargs)
- narrowmatch = repo.narrowmatch()
- if narrowmatch.always():
- return u1, u2
-
- u1 = [f for f in u1 if narrowmatch(f)]
- u2 = [f for f in u2 if narrowmatch(f)]
- return u1, u2
- extensions.wrapfunction(copies, '_computenonoverlap', _computenonoverlap)
--- a/hgext/narrow/narrowrepo.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/narrow/narrowrepo.py Wed Jun 06 13:31:24 2018 -0400
@@ -15,6 +15,7 @@
)
from . import (
+ narrowdirstate,
narrowrevlog,
)
@@ -62,4 +63,8 @@
return scmutil.status(modified, added, removed, deleted, unknown,
ignored, clean)
+ def _makedirstate(self):
+ dirstate = super(narrowrepository, self)._makedirstate()
+ return narrowdirstate.wrapdirstate(self, dirstate)
+
repo.__class__ = narrowrepository
--- a/hgext/notify.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/notify.py Wed Jun 06 13:31:24 2018 -0400
@@ -113,6 +113,9 @@
notify.diffstat
Set to True to include a diffstat before diff content. Default: True.
+notify.showfunc
+ If set, override ``diff.showfunc`` for the diff content. Default: None.
+
notify.merge
If True, send notifications for merge changesets. Default: True.
@@ -206,6 +209,9 @@
configitem('notify', 'sources',
default='serve',
)
+configitem('notify', 'showfunc',
+ default=None,
+)
configitem('notify', 'strip',
default=0,
)
@@ -260,6 +266,9 @@
self.charsets = mail._charsets(self.ui)
self.subs = self.subscribers()
self.merge = self.ui.configbool('notify', 'merge')
+ self.showfunc = self.ui.configbool('notify', 'showfunc')
+ if self.showfunc is None:
+ self.showfunc = self.ui.configbool('diff', 'showfunc')
mapfile = None
template = (self.ui.config('notify', hooktype) or
@@ -420,8 +429,9 @@
ref = ref.node()
else:
ref = ctx.node()
- chunks = patch.diff(self.repo, prev, ref,
- opts=patch.diffallopts(self.ui))
+ diffopts = patch.diffallopts(self.ui)
+ diffopts.showfunc = self.showfunc
+ chunks = patch.diff(self.repo, prev, ref, opts=diffopts)
difflines = ''.join(chunks).splitlines()
if self.ui.configbool('notify', 'diffstat'):
--- a/hgext/patchbomb.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/patchbomb.py Wed Jun 06 13:31:24 2018 -0400
@@ -79,7 +79,6 @@
import errno
import os
import socket
-import tempfile
from mercurial.i18n import _
from mercurial import (
@@ -94,7 +93,6 @@
patch,
pycompat,
registrar,
- repair,
scmutil,
templater,
util,
@@ -318,7 +316,7 @@
The bundle is a returned as a single in-memory binary blob.
"""
ui = repo.ui
- tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
+ tmpdir = pycompat.mkdtemp(prefix='hg-email-bundle-')
tmpfn = os.path.join(tmpdir, 'bundle')
btype = ui.config('patchbomb', 'bundletype')
if btype:
@@ -624,7 +622,7 @@
elif bookmark:
if bookmark not in repo._bookmarks:
raise error.Abort(_("bookmark '%s' not found") % bookmark)
- revs = repair.stripbmrevset(repo, bookmark)
+ revs = scmutil.bookmarkrevs(repo, bookmark)
revs = scmutil.revrange(repo, revs)
if outgoing:
--- a/hgext/rebase.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/rebase.py Wed Jun 06 13:31:24 2018 -0400
@@ -798,21 +798,21 @@
"""
inmemory = ui.configbool('rebase', 'experimental.inmemory')
- if (opts.get('continue') or opts.get('abort') or
+ if (opts.get(r'continue') or opts.get(r'abort') or
repo.currenttransaction() is not None):
# in-memory rebase is not compatible with resuming rebases.
# (Or if it is run within a transaction, since the restart logic can
# fail the entire transaction.)
inmemory = False
- if opts.get('auto_orphans'):
+ if opts.get(r'auto_orphans'):
for key in opts:
- if key != 'auto_orphans' and opts.get(key):
+ if key != r'auto_orphans' and opts.get(key):
raise error.Abort(_('--auto-orphans is incompatible with %s') %
- ('--' + key))
- userrevs = list(repo.revs(opts.get('auto_orphans')))
- opts['rev'] = [revsetlang.formatspec('%ld and orphan()', userrevs)]
- opts['dest'] = '_destautoorphanrebase(SRC)'
+ ('--' + pycompat.bytestr(key)))
+ userrevs = list(repo.revs(opts.get(r'auto_orphans')))
+ opts[r'rev'] = [revsetlang.formatspec('%ld and orphan()', userrevs)]
+ opts[r'dest'] = '_destautoorphanrebase(SRC)'
if inmemory:
try:
@@ -824,7 +824,7 @@
except error.InMemoryMergeConflictsError:
ui.warn(_('hit merge conflicts; re-running rebase without in-memory'
' merge\n'))
- _origrebase(ui, repo, **{'abort': True})
+ _origrebase(ui, repo, **{r'abort': True})
return _origrebase(ui, repo, inmemory=False, **opts)
else:
return _origrebase(ui, repo, **opts)
--- a/hgext/remotenames.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/remotenames.py Wed Jun 06 13:31:24 2018 -0400
@@ -249,6 +249,10 @@
extensions.wrapfunction(bookmarks, '_printbookmarks', wrapprintbookmarks)
def reposetup(ui, repo):
+
+ # set the config option to store remotenames
+ repo.ui.setconfig('experimental', 'remotenames', True, 'remotenames-ext')
+
if not repo.local():
return
--- a/hgext/shelve.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/shelve.py Wed Jun 06 13:31:24 2018 -0400
@@ -933,27 +933,27 @@
# to the original pctx.
activebookmark = _backupactivebookmark(repo)
+ tmpwctx, addedbefore = _commitworkingcopychanges(ui, repo, opts,
+ tmpwctx)
+ repo, shelvectx = _unshelverestorecommit(ui, repo, basename)
+ _checkunshelveuntrackedproblems(ui, repo, shelvectx)
+ branchtorestore = ''
+ if shelvectx.branch() != shelvectx.p1().branch():
+ branchtorestore = shelvectx.branch()
+
+ shelvectx = _rebaserestoredcommit(ui, repo, opts, tr, oldtiprev,
+ basename, pctx, tmpwctx,
+ shelvectx, branchtorestore,
+ activebookmark)
overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
with ui.configoverride(overrides, 'unshelve'):
- tmpwctx, addedbefore = _commitworkingcopychanges(ui, repo, opts,
- tmpwctx)
- repo, shelvectx = _unshelverestorecommit(ui, repo, basename)
- _checkunshelveuntrackedproblems(ui, repo, shelvectx)
- branchtorestore = ''
- if shelvectx.branch() != shelvectx.p1().branch():
- branchtorestore = shelvectx.branch()
+ mergefiles(ui, repo, pctx, shelvectx)
+ restorebranch(ui, repo, branchtorestore)
+ _forgetunknownfiles(repo, shelvectx, addedbefore)
- shelvectx = _rebaserestoredcommit(ui, repo, opts, tr, oldtiprev,
- basename, pctx, tmpwctx,
- shelvectx, branchtorestore,
- activebookmark)
- mergefiles(ui, repo, pctx, shelvectx)
- restorebranch(ui, repo, branchtorestore)
- _forgetunknownfiles(repo, shelvectx, addedbefore)
-
- shelvedstate.clear(repo)
- _finishunshelve(repo, oldtiprev, tr, activebookmark)
- unshelvecleanup(ui, repo, basename, opts)
+ shelvedstate.clear(repo)
+ _finishunshelve(repo, oldtiprev, tr, activebookmark)
+ unshelvecleanup(ui, repo, basename, opts)
finally:
if tr:
tr.release()
--- a/hgext/sparse.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/sparse.py Wed Jun 06 13:31:24 2018 -0400
@@ -138,9 +138,9 @@
extensions.wrapfunction(logcmdutil, '_initialrevs', _initialrevs)
def _clonesparsecmd(orig, ui, repo, *args, **opts):
- include_pat = opts.get('include')
- exclude_pat = opts.get('exclude')
- enableprofile_pat = opts.get('enable_profile')
+ include_pat = opts.get(r'include')
+ exclude_pat = opts.get(r'exclude')
+ enableprofile_pat = opts.get(r'enable_profile')
include = exclude = enableprofile = False
if include_pat:
pat = include_pat
@@ -178,7 +178,7 @@
'also include directories of added files in sparse config'))
def _add(orig, ui, repo, *pats, **opts):
- if opts.get('sparse'):
+ if opts.get(r'sparse'):
dirs = set()
for pat in pats:
dirname, basename = util.split(pat)
--- a/hgext/split.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/split.py Wed Jun 06 13:31:24 2018 -0400
@@ -60,6 +60,7 @@
By default, rebase connected non-obsoleted descendants onto the new
changeset. Use --no-rebase to avoid the rebase.
"""
+ opts = pycompat.byteskwargs(opts)
revlist = []
if opts.get('rev'):
revlist.append(opts.get('rev'))
--- a/hgext/strip.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/strip.py Wed Jun 06 13:31:24 2018 -0400
@@ -165,7 +165,7 @@
nodetobookmarks.setdefault(node, []).append(mark)
for marks in nodetobookmarks.values():
if bookmarks.issuperset(marks):
- rsrevs = repair.stripbmrevset(repo, marks[0])
+ rsrevs = scmutil.bookmarkrevs(repo, marks[0])
revs.update(set(rsrevs))
if not revs:
with repo.lock(), repo.transaction('bookmark') as tr:
--- a/hgext/transplant.py Wed Jun 06 13:28:49 2018 -0400
+++ b/hgext/transplant.py Wed Jun 06 13:31:24 2018 -0400
@@ -16,7 +16,7 @@
from __future__ import absolute_import
import os
-import tempfile
+
from mercurial.i18n import _
from mercurial import (
bundlerepo,
@@ -215,7 +215,7 @@
if skipmerge:
patchfile = None
else:
- fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
+ fd, patchfile = pycompat.mkstemp(prefix='hg-transplant-')
fp = os.fdopen(fd, r'wb')
gen = patch.diff(source, parent, node, opts=diffopts)
for chunk in gen:
@@ -263,7 +263,7 @@
self.ui.status(_('filtering %s\n') % patchfile)
user, date, msg = (changelog[1], changelog[2], changelog[4])
- fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
+ fd, headerfile = pycompat.mkstemp(prefix='hg-transplant-')
fp = os.fdopen(fd, r'wb')
fp.write("# HG changeset patch\n")
fp.write("# User %s\n" % user)
--- a/i18n/da.po Wed Jun 06 13:28:49 2018 -0400
+++ b/i18n/da.po Wed Jun 06 13:31:24 2018 -0400
@@ -13696,7 +13696,7 @@
msgstr ""
msgid ""
-"``allow_archive``\n"
+"``allow-archive``\n"
" List of archive format (bz2, gz, zip) allowed for downloading.\n"
" Default is empty."
msgstr ""
--- a/i18n/de.po Wed Jun 06 13:28:49 2018 -0400
+++ b/i18n/de.po Wed Jun 06 13:31:24 2018 -0400
@@ -17347,7 +17347,7 @@
msgstr ""
msgid ""
-"``allow_archive``\n"
+"``allow-archive``\n"
" List of archive format (bz2, gz, zip) allowed for downloading.\n"
" Default is empty."
msgstr ""
--- a/i18n/ja.po Wed Jun 06 13:28:49 2018 -0400
+++ b/i18n/ja.po Wed Jun 06 13:31:24 2018 -0400
@@ -27712,11 +27712,11 @@
" サーバの待ちうけアドレス。 (デフォルト値: ホストの持つ全アドレス)"
msgid ""
-"``allow_archive``\n"
+"``allow-archive``\n"
" List of archive format (bz2, gz, zip) allowed for downloading.\n"
" (default: empty)"
msgstr ""
-"``allow_archive``\n"
+"``allow-archive``\n"
" 利用可能なダウンロード向けのアーカイブ形式 (bz2, gz, zip) 一覧。\n"
" (デフォルト値: 空 = ダウンロード不可)"
--- a/i18n/pt_BR.po Wed Jun 06 13:28:49 2018 -0400
+++ b/i18n/pt_BR.po Wed Jun 06 13:31:24 2018 -0400
@@ -28663,11 +28663,11 @@
" (padrão: usa todos os endereços)"
msgid ""
-"``allow_archive``\n"
+"``allow-archive``\n"
" List of archive format (bz2, gz, zip) allowed for downloading.\n"
" (default: empty)"
msgstr ""
-"``allow_archive``\n"
+"``allow-archive``\n"
" Lista de formatos de pacote (bz2, gz, zip) permitidos para download.\n"
" (padrão: lista vazia)"
--- a/i18n/ro.po Wed Jun 06 13:28:49 2018 -0400
+++ b/i18n/ro.po Wed Jun 06 13:31:24 2018 -0400
@@ -12099,7 +12099,7 @@
msgstr ""
msgid ""
-"``allow_archive``\n"
+"``allow-archive``\n"
" List of archive format (bz2, gz, zip) allowed for downloading.\n"
" Default is empty."
msgstr ""
--- a/i18n/ru.po Wed Jun 06 13:28:49 2018 -0400
+++ b/i18n/ru.po Wed Jun 06 13:31:24 2018 -0400
@@ -19776,11 +19776,11 @@
" Адрес прослушиваемого интерфейса. По умолчанию все интерфейсы."
msgid ""
-"``allow_archive``\n"
+"``allow-archive``\n"
" List of archive format (bz2, gz, zip) allowed for downloading.\n"
" Default is empty."
msgstr ""
-"``allow_archive``\n"
+"``allow-archive``\n"
" Список форматов архивов (bz2, gz, zip), которые можно скачивать.\n"
" По умолчанию пуст."
--- a/i18n/sv.po Wed Jun 06 13:28:49 2018 -0400
+++ b/i18n/sv.po Wed Jun 06 13:31:24 2018 -0400
@@ -15034,7 +15034,7 @@
msgstr ""
msgid ""
-"``allow_archive``\n"
+"``allow-archive``\n"
" List of archive format (bz2, gz, zip) allowed for downloading.\n"
" Default is empty."
msgstr ""
--- a/mercurial/bookmarks.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/bookmarks.py Wed Jun 06 13:31:24 2018 -0400
@@ -43,7 +43,7 @@
fp, pending = txnutil.trypending(repo.root, repo.vfs, 'bookmarks')
return fp
-class bmstore(dict):
+class bmstore(object):
"""Storage for bookmarks.
This object should do all bookmark-related reads and writes, so
@@ -58,13 +58,13 @@
"""
def __init__(self, repo):
- dict.__init__(self)
self._repo = repo
+ self._refmap = refmap = {} # refspec: node
+ self._nodemap = nodemap = {} # node: sorted([refspec, ...])
self._clean = True
self._aclean = True
nm = repo.changelog.nodemap
tonode = bin # force local lookup
- setitem = dict.__setitem__
try:
with _getbkfile(repo) as bkfile:
for line in bkfile:
@@ -76,7 +76,15 @@
node = tonode(sha)
if node in nm:
refspec = encoding.tolocal(refspec)
- setitem(self, refspec, node)
+ refmap[refspec] = node
+ nrefs = nodemap.get(node)
+ if nrefs is None:
+ nodemap[node] = [refspec]
+ else:
+ nrefs.append(refspec)
+ if nrefs[-2] > refspec:
+ # bookmarks weren't sorted before 4.5
+ nrefs.sort()
except (TypeError, ValueError):
# TypeError:
# - bin(...)
@@ -96,38 +104,78 @@
@active.setter
def active(self, mark):
- if mark is not None and mark not in self:
+ if mark is not None and mark not in self._refmap:
raise AssertionError('bookmark %s does not exist!' % mark)
self._active = mark
self._aclean = False
- def __setitem__(self, *args, **kwargs):
- raise error.ProgrammingError("use 'bookmarks.applychanges' instead")
+ def __len__(self):
+ return len(self._refmap)
+
+ def __iter__(self):
+ return iter(self._refmap)
+
+ def iteritems(self):
+ return self._refmap.iteritems()
+
+ def items(self):
+ return self._refmap.items()
- def _set(self, key, value):
- self._clean = False
- return dict.__setitem__(self, key, value)
+ # TODO: maybe rename to allnames()?
+ def keys(self):
+ return self._refmap.keys()
+
+ # TODO: maybe rename to allnodes()? but nodes would have to be deduplicated
+ # could be self._nodemap.keys()
+ def values(self):
+ return self._refmap.values()
+
+ def __contains__(self, mark):
+ return mark in self._refmap
+
+ def __getitem__(self, mark):
+ return self._refmap[mark]
- def __delitem__(self, key):
- raise error.ProgrammingError("use 'bookmarks.applychanges' instead")
+ def get(self, mark, default=None):
+ return self._refmap.get(mark, default)
- def _del(self, key):
+ def _set(self, mark, node):
self._clean = False
- return dict.__delitem__(self, key)
+ if mark in self._refmap:
+ self._del(mark)
+ self._refmap[mark] = node
+ nrefs = self._nodemap.get(node)
+ if nrefs is None:
+ self._nodemap[node] = [mark]
+ else:
+ nrefs.append(mark)
+ nrefs.sort()
- def update(self, *others):
- raise error.ProgrammingError("use 'bookmarks.applychanges' instead")
+ def _del(self, mark):
+ self._clean = False
+ node = self._refmap.pop(mark)
+ nrefs = self._nodemap[node]
+ if len(nrefs) == 1:
+ assert nrefs[0] == mark
+ del self._nodemap[node]
+ else:
+ nrefs.remove(mark)
+
+ def names(self, node):
+ """Return a sorted list of bookmarks pointing to the specified node"""
+ return self._nodemap.get(node, [])
def changectx(self, mark):
- return self._repo[self[mark]]
+ node = self._refmap[mark]
+ return self._repo[node]
def applychanges(self, repo, tr, changes):
"""Apply a list of changes to bookmarks
"""
bmchanges = tr.changes.get('bookmarks')
for name, node in changes:
- old = self.get(name)
+ old = self._refmap.get(name)
if node is None:
self._del(name)
else:
@@ -151,7 +199,7 @@
def _writerepo(self, repo):
"""Factored out for extensibility"""
rbm = repo._bookmarks
- if rbm.active not in self:
+ if rbm.active not in self._refmap:
rbm.active = None
rbm._writeactive()
@@ -182,7 +230,7 @@
self._aclean = True
def _write(self, fp):
- for name, node in sorted(self.iteritems()):
+ for name, node in sorted(self._refmap.iteritems()):
fp.write("%s %s\n" % (hex(node), encoding.fromlocal(name)))
self._clean = True
self._repo.invalidatevolatilesets()
@@ -208,15 +256,15 @@
If divergent bookmark are to be deleted, they will be returned as list.
"""
cur = self._repo['.'].node()
- if mark in self and not force:
+ if mark in self._refmap and not force:
if target:
- if self[mark] == target and target == cur:
+ if self._refmap[mark] == target and target == cur:
# re-activating a bookmark
return []
rev = self._repo[target].rev()
anc = self._repo.changelog.ancestors([rev])
bmctx = self.changectx(mark)
- divs = [self[b] for b in self
+ divs = [self._refmap[b] for b in self._refmap
if b.split('@', 1)[0] == mark.split('@', 1)[0]]
# allow resolving a single divergent bookmark even if moving
--- a/mercurial/bundlerepo.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/bundlerepo.py Wed Jun 06 13:31:24 2018 -0400
@@ -15,7 +15,6 @@
import os
import shutil
-import tempfile
from .i18n import _
from .node import nullid
@@ -270,7 +269,7 @@
try:
localrepo.localrepository.__init__(self, ui, repopath)
except error.RepoError:
- self._tempparent = tempfile.mkdtemp()
+ self._tempparent = pycompat.mkdtemp()
localrepo.instance(ui, self._tempparent, 1)
localrepo.localrepository.__init__(self, ui, self._tempparent)
self.ui.setconfig('phases', 'publish', False, 'bundlerepo')
--- a/mercurial/cext/parsers.c Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/cext/parsers.c Wed Jun 06 13:31:24 2018 -0400
@@ -713,7 +713,7 @@
void manifest_module_init(PyObject *mod);
void revlog_module_init(PyObject *mod);
-static const int version = 4;
+static const int version = 5;
static void module_init(PyObject *mod)
{
--- a/mercurial/cext/pathencode.c Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/cext/pathencode.c Wed Jun 06 13:31:24 2018 -0400
@@ -655,16 +655,10 @@
PyObject *shaobj, *hashobj;
if (shafunc == NULL) {
- PyObject *hashlib, *name = PyBytes_FromString("hashlib");
-
- if (name == NULL)
- return -1;
-
- hashlib = PyImport_Import(name);
- Py_DECREF(name);
-
+ PyObject *hashlib = PyImport_ImportModule("hashlib");
if (hashlib == NULL) {
- PyErr_SetString(PyExc_ImportError, "hashlib");
+ PyErr_SetString(PyExc_ImportError,
+ "pathencode failed to find hashlib");
return -1;
}
shafunc = PyObject_GetAttrString(hashlib, "sha1");
@@ -673,12 +667,12 @@
if (shafunc == NULL) {
PyErr_SetString(PyExc_AttributeError,
"module 'hashlib' has no "
- "attribute 'sha1'");
+ "attribute 'sha1' in pathencode");
return -1;
}
}
- shaobj = PyObject_CallFunction(shafunc, "s#", str, len);
+ shaobj = PyObject_CallFunction(shafunc, PY23("s#", "y#"), str, len);
if (shaobj == NULL)
return -1;
--- a/mercurial/cext/revlog.c Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/cext/revlog.c Wed Jun 06 13:31:24 2018 -0400
@@ -248,6 +248,20 @@
return data ? data + 32 : NULL;
}
+/*
+ * Return the 20-byte SHA of the node corresponding to the given rev. The
+ * rev is assumed to be existing. If not, an exception is set.
+ */
+static const char *index_node_existing(indexObject *self, Py_ssize_t pos)
+{
+ const char *node = index_node(self, pos);
+ if (node == NULL) {
+ PyErr_Format(PyExc_IndexError, "could not access rev %d",
+ (int)pos);
+ }
+ return node;
+}
+
static int nt_insert(indexObject *self, const char *node, int rev);
static int node_check(PyObject *obj, char **node, Py_ssize_t *nodelen)
@@ -1054,10 +1068,12 @@
return 0;
}
if (v < 0) {
- const char *oldnode = index_node(self, -(v + 1));
+ const char *oldnode = index_node_existing(self, -(v + 1));
int noff;
- if (!oldnode || !memcmp(oldnode, node, 20)) {
+ if (oldnode == NULL)
+ return -1;
+ if (!memcmp(oldnode, node, 20)) {
n->children[k] = -rev - 1;
return 0;
}
@@ -1137,9 +1153,9 @@
*/
if (self->ntmisses++ < 4) {
for (rev = self->ntrev - 1; rev >= 0; rev--) {
- const char *n = index_node(self, rev);
+ const char *n = index_node_existing(self, rev);
if (n == NULL)
- return -2;
+ return -3;
if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
if (nt_insert(self, n, rev) == -1)
return -3;
@@ -1148,11 +1164,9 @@
}
} else {
for (rev = self->ntrev - 1; rev >= 0; rev--) {
- const char *n = index_node(self, rev);
- if (n == NULL) {
- self->ntrev = rev + 1;
- return -2;
- }
+ const char *n = index_node_existing(self, rev);
+ if (n == NULL)
+ return -3;
if (nt_insert(self, n, rev) == -1) {
self->ntrev = rev + 1;
return -3;
@@ -1218,27 +1232,84 @@
return NULL;
}
+/*
+ * Fully populate the radix tree.
+ */
+static int nt_populate(indexObject *self) {
+ int rev;
+ if (self->ntrev > 0) {
+ for (rev = self->ntrev - 1; rev >= 0; rev--) {
+ const char *n = index_node_existing(self, rev);
+ if (n == NULL)
+ return -1;
+ if (nt_insert(self, n, rev) == -1)
+ return -1;
+ }
+ self->ntrev = -1;
+ }
+ return 0;
+}
+
static int nt_partialmatch(indexObject *self, const char *node,
Py_ssize_t nodelen)
{
- int rev;
+ if (nt_init(self) == -1)
+ return -3;
+ if (nt_populate(self) == -1)
+ return -3;
+
+ return nt_find(self, node, nodelen, 1);
+}
+
+/*
+ * Find the length of the shortest unique prefix of node.
+ *
+ * Return values:
+ *
+ * -3: error (exception set)
+ * -2: not found (no exception set)
+ * rest: length of shortest prefix
+ */
+static int nt_shortest(indexObject *self, const char *node)
+{
+ int level, off;
if (nt_init(self) == -1)
return -3;
+ if (nt_populate(self) == -1)
+ return -3;
- if (self->ntrev > 0) {
- /* ensure that the radix tree is fully populated */
- for (rev = self->ntrev - 1; rev >= 0; rev--) {
- const char *n = index_node(self, rev);
+ for (level = off = 0; level < 40; level++) {
+ int k, v;
+ nodetree *n = &self->nt[off];
+ k = nt_level(node, level);
+ v = n->children[k];
+ if (v < 0) {
+ const char *n;
+ v = -(v + 1);
+ n = index_node_existing(self, v);
if (n == NULL)
+ return -3;
+ if (memcmp(node, n, 20) != 0)
+ /*
+ * Found a unique prefix, but it wasn't for the
+ * requested node (i.e the requested node does
+ * not exist).
+ */
return -2;
- if (nt_insert(self, n, rev) == -1)
- return -3;
+ return level + 1;
}
- self->ntrev = rev;
+ if (v == 0)
+ return -2;
+ off = v;
}
-
- return nt_find(self, node, nodelen, 1);
+ /*
+ * The node was still not unique after 40 hex digits, so this won't
+ * happen. Also, if we get here, then there's a programming error in
+ * this file that made us insert a node longer than 40 hex digits.
+ */
+ PyErr_SetString(PyExc_Exception, "broken node tree");
+ return -3;
}
static PyObject *index_partialmatch(indexObject *self, PyObject *args)
@@ -1251,7 +1322,7 @@
if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &node, &nodelen))
return NULL;
- if (nodelen < 4) {
+ if (nodelen < 1) {
PyErr_SetString(PyExc_ValueError, "key too short");
return NULL;
}
@@ -1282,15 +1353,36 @@
return PyBytes_FromStringAndSize(nullid, 20);
}
- fullnode = index_node(self, rev);
+ fullnode = index_node_existing(self, rev);
if (fullnode == NULL) {
- PyErr_Format(PyExc_IndexError,
- "could not access rev %d", rev);
return NULL;
}
return PyBytes_FromStringAndSize(fullnode, 20);
}
+static PyObject *index_shortest(indexObject *self, PyObject *args)
+{
+ Py_ssize_t nodelen;
+ PyObject *val;
+ char *node;
+ int length;
+
+ if (!PyArg_ParseTuple(args, "O", &val))
+ return NULL;
+ if (node_check(val, &node, &nodelen) == -1)
+ return NULL;
+
+ self->ntlookups++;
+ length = nt_shortest(self, node);
+ if (length == -3)
+ return NULL;
+ if (length == -2) {
+ raise_revlog_error();
+ return NULL;
+ }
+ return PyInt_FromLong(length);
+}
+
static PyObject *index_m_get(indexObject *self, PyObject *args)
{
Py_ssize_t nodelen;
@@ -1760,10 +1852,11 @@
Py_ssize_t i;
for (i = start + 1; i < self->length - 1; i++) {
- const char *node = index_node(self, i);
+ const char *node = index_node_existing(self, i);
+ if (node == NULL)
+ return -1;
- if (node)
- nt_insert(self, node, -1);
+ nt_insert(self, node, -1);
}
if (self->added)
nt_invalidate_added(self, 0);
@@ -1979,6 +2072,8 @@
"insert an index entry"},
{"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS,
"match a potentially ambiguous node ID"},
+ {"shortest", (PyCFunction)index_shortest, METH_VARARGS,
+ "find length of shortest hex nodeid of a binary ID"},
{"stats", (PyCFunction)index_stats, METH_NOARGS,
"stats for the index"},
{NULL} /* Sentinel */
--- a/mercurial/changegroup.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/changegroup.py Wed Jun 06 13:31:24 2018 -0400
@@ -9,7 +9,6 @@
import os
import struct
-import tempfile
import weakref
from .i18n import _
@@ -80,7 +79,7 @@
# small (4k is common on Linux).
fh = open(filename, "wb", 131072)
else:
- fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
+ fd, filename = pycompat.mkstemp(prefix="hg-bundle-", suffix=".hg")
fh = os.fdopen(fd, r"wb")
cleanup = filename
for c in chunks:
--- a/mercurial/cmdutil.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/cmdutil.py Wed Jun 06 13:31:24 2018 -0400
@@ -10,7 +10,6 @@
import errno
import os
import re
-import tempfile
from .i18n import _
from .node import (
@@ -37,7 +36,6 @@
patch,
pathutil,
pycompat,
- registrar,
revlog,
rewriteutil,
scmutil,
@@ -63,11 +61,6 @@
_('do not perform actions, just print output')),
]
-confirmopts = [
- ('', 'confirm', None,
- _('ask before applying actions')),
-]
-
remoteopts = [
('e', 'ssh', '',
_('specify ssh command to use'), _('CMD')),
@@ -203,17 +196,21 @@
return oldwrite
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
- if usecurses:
- if testfile:
- recordfn = crecordmod.testdecorator(testfile,
- crecordmod.testchunkselector)
- else:
- recordfn = crecordmod.chunkselector
-
- return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
-
- else:
- return patch.filterpatch(ui, originalhunks, operation)
+ try:
+ if usecurses:
+ if testfile:
+ recordfn = crecordmod.testdecorator(
+ testfile, crecordmod.testchunkselector)
+ else:
+ recordfn = crecordmod.chunkselector
+
+ return crecordmod.filterpatch(ui, originalhunks, recordfn,
+ operation)
+ except crecordmod.fallbackerror as e:
+ ui.warn('%s\n' % e.message)
+ ui.warn(_('falling back to text mode\n'))
+
+ return patch.filterpatch(ui, originalhunks, operation)
def recordfilter(ui, originalhunks, operation=None):
""" Prompts the user to filter the originalhunks and return a list of
@@ -331,7 +328,7 @@
try:
# backup continues
for f in tobackup:
- fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
+ fd, tmpname = pycompat.mkstemp(prefix=f.replace('/', '_') + '.',
dir=backupdir)
os.close(fd)
ui.debug('backup %r as %r\n' % (f, tmpname))
@@ -419,7 +416,7 @@
Represent a directory in user working copy with information required for
the purpose of tersing its status.
- path is the path to the directory
+ path is the path to the directory, without a trailing '/'
statuses is a set of statuses of all files in this directory (this includes
all the files in all the subdirectories too)
@@ -456,7 +453,7 @@
# does the dirnode object for subdir exists
if subdir not in self.subdirs:
- subdirpath = os.path.join(self.path, subdir)
+ subdirpath = pathutil.join(self.path, subdir)
self.subdirs[subdir] = dirnode(subdirpath)
# try adding the file in subdir
@@ -471,7 +468,7 @@
def iterfilepaths(self):
"""Yield (status, path) for files directly under this directory."""
for f, st in self.files:
- yield st, os.path.join(self.path, f)
+ yield st, pathutil.join(self.path, f)
def tersewalk(self, terseargs):
"""
@@ -485,7 +482,7 @@
1) All the files in the directory (including all the files in its
subdirectories) share the same status and the user has asked us to terse
- that status. -> yield (status, dirpath)
+ that status. -> yield (status, dirpath). dirpath will end in '/'.
2) Otherwise, we do following:
@@ -502,7 +499,7 @@
# Making sure we terse only when the status abbreviation is
# passed as terse argument
if onlyst in terseargs:
- yield onlyst, self.path + pycompat.ossep
+ yield onlyst, self.path + '/'
return
# add the files to status list
@@ -3002,12 +2999,6 @@
if not opts.get('dry_run'):
needdata = ('revert', 'add', 'undelete')
- if _revertprefetch is not _revertprefetchstub:
- ui.deprecwarn("'cmdutil._revertprefetch' is deprecated, "
- "add a callback to 'scmutil.fileprefetchhooks'",
- '4.6', stacklevel=1)
- _revertprefetch(repo, ctx,
- *[actions[name][0] for name in needdata])
oplist = [actions[name][0] for name in needdata]
prefetch = scmutil.prefetchfiles
matchfiles = scmutil.matchfiles
@@ -3026,12 +3017,6 @@
raise error.Abort("subrepository '%s' does not exist in %s!"
% (sub, short(ctx.node())))
-def _revertprefetchstub(repo, ctx, *files):
- """Stub method for detecting extension wrapping of _revertprefetch(), to
- issue a deprecation warning."""
-
-_revertprefetch = _revertprefetchstub
-
def _performrevert(repo, parents, ctx, actions, interactive=False,
tobackup=None):
"""function that actually perform all the actions computed for revert
@@ -3168,12 +3153,6 @@
if f in copied:
repo.dirstate.copy(copied[f], f)
-class command(registrar.command):
- """deprecated: used registrar.command instead"""
- def _doregister(self, func, name, *args, **kwargs):
- func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
- return super(command, self)._doregister(func, name, *args, **kwargs)
-
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
@@ -3285,23 +3264,3 @@
if after[1]:
hint = after[0]
raise error.Abort(_('no %s in progress') % task, hint=hint)
-
-class changeset_printer(logcmdutil.changesetprinter):
-
- def __init__(self, ui, *args, **kwargs):
- msg = ("'cmdutil.changeset_printer' is deprecated, "
- "use 'logcmdutil.logcmdutil'")
- ui.deprecwarn(msg, "4.6")
- super(changeset_printer, self).__init__(ui, *args, **kwargs)
-
-def displaygraph(ui, *args, **kwargs):
- msg = ("'cmdutil.displaygraph' is deprecated, "
- "use 'logcmdutil.displaygraph'")
- ui.deprecwarn(msg, "4.6")
- return logcmdutil.displaygraph(ui, *args, **kwargs)
-
-def show_changeset(ui, *args, **kwargs):
- msg = ("'cmdutil.show_changeset' is deprecated, "
- "use 'logcmdutil.changesetdisplayer'")
- ui.deprecwarn(msg, "4.6")
- return logcmdutil.changesetdisplayer(ui, *args, **kwargs)
--- a/mercurial/commands.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/commands.py Wed Jun 06 13:31:24 2018 -0400
@@ -54,6 +54,7 @@
rewriteutil,
scmutil,
server,
+ state as statemod,
streamclone,
tags as tagsmod,
templatekw,
@@ -63,7 +64,6 @@
)
from .utils import (
dateutil,
- procutil,
stringutil,
)
@@ -1895,7 +1895,9 @@
root=opts.get('root'))
@command('^export',
- [('o', 'output', '',
+ [('B', 'bookmark', '',
+ _('export changes only reachable by given bookmark')),
+ ('o', 'output', '',
_('print output to file with formatted name'), _('FORMAT')),
('', 'switch-parent', None, _('diff against the second parent')),
('r', 'rev', [], _('revisions to export'), _('REV')),
@@ -1938,6 +1940,9 @@
of files it detects as binary. With -a, export will generate a
diff anyway, probably with undesirable results.
+ With -B/--bookmark changesets reachable by the given bookmark are
+ selected.
+
Use the -g/--git option to generate diffs in the git extended diff
format. See :hg:`help diffs` for more information.
@@ -1966,11 +1971,24 @@
Returns 0 on success.
"""
opts = pycompat.byteskwargs(opts)
+ bookmark = opts.get('bookmark')
changesets += tuple(opts.get('rev', []))
- if not changesets:
- changesets = ['.']
- repo = scmutil.unhidehashlikerevs(repo, changesets, 'nowarn')
- revs = scmutil.revrange(repo, changesets)
+
+ if bookmark and changesets:
+ raise error.Abort(_("-r and -B are mutually exclusive"))
+
+ if bookmark:
+ if bookmark not in repo._bookmarks:
+ raise error.Abort(_("bookmark '%s' not found") % bookmark)
+
+ revs = scmutil.bookmarkrevs(repo, bookmark)
+ else:
+ if not changesets:
+ changesets = ['.']
+
+ repo = scmutil.unhidehashlikerevs(repo, changesets, 'nowarn')
+ revs = scmutil.revrange(repo, changesets)
+
if not revs:
raise error.Abort(_("export requires at least one changeset"))
if len(revs) > 1:
@@ -2146,7 +2164,7 @@
.. note::
The -c/--continue option does not reapply earlier options, except
- for --force.
+ for --force, --user and --date.
.. container:: verbose
@@ -2188,6 +2206,8 @@
revs = list(revs)
revs.extend(opts.get('rev'))
+ # a dict of data to be stored in state file
+ statedata = {}
if not opts.get('user') and opts.get('currentuser'):
opts['user'] = ui.username()
@@ -2198,17 +2218,23 @@
**pycompat.strkwargs(opts))
cont = False
+ graftstate = statemod.cmdstate(repo, 'graftstate')
if opts.get('continue'):
cont = True
if revs:
raise error.Abort(_("can't specify --continue and revisions"))
# read in unfinished revisions
- try:
- nodes = repo.vfs.read('graftstate').splitlines()
+ if graftstate.exists():
+ statedata = _readgraftstate(repo, graftstate)
+ if statedata.get('date'):
+ opts['date'] = statedata['date']
+ if statedata.get('user'):
+ opts['user'] = statedata['user']
+ if statedata.get('log'):
+ opts['log'] = True
+ nodes = statedata['nodes']
revs = [repo[node].rev() for node in nodes]
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ else:
cmdutil.wrongtooltocontinue(repo, _('graft'))
else:
if not revs:
@@ -2312,12 +2338,15 @@
user = ctx.user()
if opts.get('user'):
user = opts['user']
+ statedata['user'] = user
date = ctx.date()
if opts.get('date'):
date = opts['date']
+ statedata['date'] = date
message = ctx.description()
if opts.get('log'):
message += '\n(grafted from %s)' % ctx.hex()
+ statedata['log'] = True
# we don't merge the first commit when continuing
if not cont:
@@ -2333,16 +2362,11 @@
# report any conflicts
if stats.unresolvedcount > 0:
# write out state for --continue
- nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
- repo.vfs.write('graftstate', ''.join(nodelines))
- extra = ''
- if opts.get('user'):
- extra += ' --user %s' % procutil.shellquote(opts['user'])
- if opts.get('date'):
- extra += ' --date %s' % procutil.shellquote(opts['date'])
- if opts.get('log'):
- extra += ' --log'
- hint=_("use 'hg resolve' and 'hg graft --continue%s'") % extra
+ nodes = [repo[rev].hex() for rev in revs[pos:]]
+ statedata['nodes'] = nodes
+ stateversion = 1
+ graftstate.save(stateversion, statedata)
+ hint = _("use 'hg resolve' and 'hg graft --continue'")
raise error.Abort(
_("unresolved conflicts, can't continue"),
hint=hint)
@@ -2359,10 +2383,18 @@
# remove state when we complete successfully
if not opts.get('dry_run'):
- repo.vfs.unlinkpath('graftstate', ignoremissing=True)
+ graftstate.delete()
return 0
+def _readgraftstate(repo, graftstate):
+ """read the graft state file and return a dict of the data stored in it"""
+ try:
+ return graftstate.read()
+ except error.CorruptedState:
+ nodes = repo.vfs.read('graftstate').splitlines()
+ return {'nodes': nodes}
+
@command('grep',
[('0', 'print0', None, _('end fields with NUL')),
('', 'all', None, _('print all revisions that match')),
@@ -2481,7 +2513,7 @@
yield ('+', b[i])
def display(fm, fn, ctx, pstates, states):
- rev = ctx.rev()
+ rev = scmutil.intrev(ctx)
if fm.isplain():
formatuser = ui.shortuser
else:
@@ -2494,7 +2526,10 @@
@util.cachefunc
def binary():
flog = getfile(fn)
- return stringutil.binary(flog.read(ctx.filenode(fn)))
+ try:
+ return stringutil.binary(flog.read(ctx.filenode(fn)))
+ except error.WdirUnsupported:
+ return ctx[fn].isbinary()
fieldnamemap = {'filename': 'file', 'linenumber': 'line_number'}
if opts.get('all'):
@@ -2503,7 +2538,8 @@
iter = [('', l) for l in states]
for change, l in iter:
fm.startitem()
- fm.data(node=fm.hexfunc(ctx.node()))
+ fm.data(node=fm.hexfunc(scmutil.binnode(ctx)))
+
cols = [
('filename', fn, True),
('rev', rev, True),
@@ -2569,8 +2605,10 @@
fnode = ctx.filenode(fn)
except error.LookupError:
continue
-
- copied = flog.renamed(fnode)
+ try:
+ copied = flog.renamed(fnode)
+ except error.WdirUnsupported:
+ copied = ctx[fn].renamed()
copy = follow and copied and copied[0]
if copy:
copies.setdefault(rev, {})[fn] = copy
@@ -2581,7 +2619,11 @@
files.append(fn)
if fn not in matches[rev]:
- grepbody(fn, rev, flog.read(fnode))
+ try:
+ content = flog.read(fnode)
+ except error.WdirUnsupported:
+ content = ctx[fn].data()
+ grepbody(fn, rev, content)
pfn = copy or fn
if pfn not in matches[parent]:
@@ -3679,6 +3721,13 @@
Returns 0 if there are outgoing changes, 1 otherwise.
"""
+ # hg._outgoing() needs to re-resolve the path in order to handle #branch
+ # style URLs, so don't overwrite dest.
+ path = ui.paths.getpath(dest, default=('default-push', 'default'))
+ if not path:
+ raise error.Abort(_('default repository not configured!'),
+ hint=_("see 'hg help config.paths'"))
+
opts = pycompat.byteskwargs(opts)
if opts.get('graph'):
logcmdutil.checkunsupportedgraphflags([], opts)
@@ -3696,8 +3745,7 @@
return 0
if opts.get('bookmarks'):
- dest = ui.expandpath(dest or 'default-push', dest or 'default')
- dest, branches = hg.parseurl(dest, opts.get('branch'))
+ dest = path.pushloc or path.loc
other = hg.peer(repo, opts, dest)
if 'bookmarks' not in other.listkeys('namespaces'):
ui.warn(_("remote doesn't support bookmarks\n"))
@@ -3706,7 +3754,7 @@
ui.pager('outgoing')
return bookmarks.outgoing(ui, repo, other)
- repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
+ repo._subtoppath = path.pushloc or path.loc
try:
return hg.outgoing(ui, repo, dest, opts)
finally:
@@ -4747,7 +4795,8 @@
('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
('', 'style', '', _('template style to use'), _('STYLE')),
('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
- ('', 'certificate', '', _('SSL certificate file'), _('FILE'))]
+ ('', 'certificate', '', _('SSL certificate file'), _('FILE')),
+ ('', 'print-url', None, _('start and print only the URL'))]
+ subrepoopts,
_('[OPTION]...'),
optionalrepo=True)
@@ -4779,6 +4828,10 @@
opts = pycompat.byteskwargs(opts)
if opts["stdio"] and opts["cmdserver"]:
raise error.Abort(_("cannot use --stdio with --cmdserver"))
+ if opts["print_url"] and ui.verbose:
+ raise error.Abort(_("cannot use --print-url with --verbose"))
+ if opts["print_url"]:
+ opts['daemon'] = True
if opts["stdio"]:
if repo is None:
@@ -4790,6 +4843,8 @@
service = server.createservice(ui, repo, opts)
return server.runservice(opts, initfn=service.init, runfn=service.run)
+_NOTTERSE = 'nothing'
+
@command('^status|st',
[('A', 'all', None, _('show status of all files')),
('m', 'modified', None, _('show only modified files')),
@@ -4800,7 +4855,7 @@
('u', 'unknown', None, _('show only unknown (not tracked) files')),
('i', 'ignored', None, _('show only ignored files')),
('n', 'no-status', None, _('hide status prefix')),
- ('t', 'terse', '', _('show the terse output (EXPERIMENTAL)')),
+ ('t', 'terse', _NOTTERSE, _('show the terse output (EXPERIMENTAL)')),
('C', 'copies', None, _('show source of copied files')),
('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
('', 'rev', [], _('show difference from revision'), _('REV')),
@@ -4898,6 +4953,11 @@
revs = opts.get('rev')
change = opts.get('change')
terse = opts.get('terse')
+ if terse is _NOTTERSE:
+ if revs:
+ terse = ''
+ else:
+ terse = ui.config('commands', 'status.terse')
if revs and change:
msg = _('cannot specify --rev and --change at the same time')
@@ -5583,18 +5643,19 @@
repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
ctx = scmutil.revsingle(repo, rev, rev)
rev = ctx.rev()
- if ctx.hidden():
+ hidden = ctx.hidden()
+ repo.ui.setconfig('ui', 'forcemerge', opts.get(r'tool'), 'update')
+
+ ret = hg.updatetotally(ui, repo, rev, brev, clean=clean,
+ updatecheck=updatecheck)
+ if hidden:
ctxstr = ctx.hex()[:12]
- ui.warn(_("updating to a hidden changeset %s\n") % ctxstr)
+ ui.warn(_("updated to hidden changeset %s\n") % ctxstr)
if ctx.obsolete():
obsfatemsg = obsutil._getfilteredreason(repo, ctxstr, ctx)
ui.warn("(%s)\n" % obsfatemsg)
-
- repo.ui.setconfig('ui', 'forcemerge', opts.get(r'tool'), 'update')
-
- return hg.updatetotally(ui, repo, rev, brev, clean=clean,
- updatecheck=updatecheck)
+ return ret
@command('verify', [])
def verify(ui, repo):
--- a/mercurial/commandserver.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/commandserver.py Wed Jun 06 13:31:24 2018 -0400
@@ -256,7 +256,7 @@
self.cout, self.cerr)
try:
- ret = (dispatch.dispatch(req) or 0) & 255 # might return None
+ ret = dispatch.dispatch(req) & 255
self.cresult.write(struct.pack('>i', int(ret)))
finally:
# restore old cwd
--- a/mercurial/config.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/config.py Wed Jun 06 13:31:24 2018 -0400
@@ -215,7 +215,7 @@
parts.append('')
if s[offset:offset + 1] == '"' and not parts[-1]:
return _parse_quote, parts, offset + 1
- elif s[offset:offset + 1] == '"' and parts[-1][-1] == '\\':
+ elif s[offset:offset + 1] == '"' and parts[-1][-1:] == '\\':
parts[-1] = parts[-1][:-1] + s[offset:offset + 1]
return _parse_plain, parts, offset + 1
parts[-1] += s[offset:offset + 1]
--- a/mercurial/configitems.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/configitems.py Wed Jun 06 13:31:24 2018 -0400
@@ -193,13 +193,14 @@
coreconfigitem('commands', 'status.skipstates',
default=[],
)
+coreconfigitem('commands', 'status.terse',
+ default='',
+)
coreconfigitem('commands', 'status.verbose',
default=False,
)
coreconfigitem('commands', 'update.check',
default=None,
- # Deprecated, remove after 4.4 release
- alias=[('experimental', 'updatecheck')]
)
coreconfigitem('commands', 'update.requiredest',
default=False,
@@ -1225,7 +1226,8 @@
coreconfigitem('web', 'address',
default='',
)
-coreconfigitem('web', 'allow_archive',
+coreconfigitem('web', 'allow-archive',
+ alias=[('web', 'allow_archive')],
default=list,
)
coreconfigitem('web', 'allow_read',
--- a/mercurial/context.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/context.py Wed Jun 06 13:31:24 2018 -0400
@@ -10,7 +10,6 @@
import errno
import filecmp
import os
-import re
import stat
from .i18n import _
@@ -24,7 +23,6 @@
short,
wdirfilenodeids,
wdirid,
- wdirrev,
)
from . import (
dagop,
@@ -52,8 +50,6 @@
propertycache = util.propertycache
-nonascii = re.compile(br'[^\x21-\x7f]').search
-
class basectx(object):
"""A basectx object represents the common logic for its children:
changectx: read-only context that is already present in the repo,
@@ -377,31 +373,6 @@
return r
-def changectxdeprecwarn(repo):
- # changectx's constructor will soon lose support for these forms of
- # changeids:
- # * stringinfied ints
- # * bookmarks, tags, branches, and other namespace identifiers
- # * hex nodeid prefixes
- #
- # Depending on your use case, replace repo[x] by one of these:
- # * If you want to support general revsets, use scmutil.revsingle(x)
- # * If you know that "x" is a stringified int, use repo[int(x)]
- # * If you know that "x" is a bookmark, use repo._bookmarks.changectx(x)
- # * If you know that "x" is a tag, use repo[repo.tags()[x]]
- # * If you know that "x" is a branch or in some other namespace,
- # use the appropriate mechanism for that namespace
- # * If you know that "x" is a hex nodeid prefix, use
- # repo[scmutil.resolvehexnodeidprefix(repo, x)]
- # * If "x" is a string that can be any of the above, but you don't want
- # to allow general revsets (perhaps because "x" may come from a remote
- # user and the revset may be too costly), use scmutil.revsymbol(repo, x)
- # * If "x" can be a mix of the above, you'll have to figure it out
- # yourself
- repo.ui.deprecwarn("changectx.__init__ is getting more limited, see "
- "context.changectxdeprecwarn() for details", "4.6",
- stacklevel=4)
-
class changectx(basectx):
"""A changecontext object makes access to data related to a particular
changeset convenient. It represents a read-only context already present in
@@ -415,22 +386,22 @@
self._node = repo.changelog.node(changeid)
self._rev = changeid
return
- if changeid == 'null':
+ elif changeid == 'null':
self._node = nullid
self._rev = nullrev
return
- if changeid == 'tip':
+ elif changeid == 'tip':
self._node = repo.changelog.tip()
self._rev = repo.changelog.rev(self._node)
return
- if (changeid == '.'
- or repo.local() and changeid == repo.dirstate.p1()):
+ elif (changeid == '.'
+ or repo.local() and changeid == repo.dirstate.p1()):
# this is a hack to delay/avoid loading obsmarkers
# when we know that '.' won't be hidden
self._node = repo.dirstate.p1()
self._rev = repo.unfiltered().changelog.rev(self._node)
return
- if len(changeid) == 20:
+ elif len(changeid) == 20:
try:
self._node = changeid
self._rev = repo.changelog.rev(changeid)
@@ -438,27 +409,17 @@
except error.FilteredLookupError:
raise
except LookupError:
- pass
+ # check if it might have come from damaged dirstate
+ #
+ # XXX we could avoid the unfiltered if we had a recognizable
+ # exception for filtered changeset access
+ if (repo.local()
+ and changeid in repo.unfiltered().dirstate.parents()):
+ msg = _("working directory has unknown parent '%s'!")
+ raise error.Abort(msg % short(changeid))
+ changeid = hex(changeid) # for the error message
- try:
- r = int(changeid)
- if '%d' % r != changeid:
- raise ValueError
- l = len(repo.changelog)
- if r < 0:
- r += l
- if r < 0 or r >= l and r != wdirrev:
- raise ValueError
- self._rev = r
- self._node = repo.changelog.node(r)
- changectxdeprecwarn(repo)
- return
- except error.FilteredIndexError:
- raise
- except (ValueError, OverflowError, IndexError):
- pass
-
- if len(changeid) == 40:
+ elif len(changeid) == 40:
try:
self._node = bin(changeid)
self._rev = repo.changelog.rev(self._node)
@@ -468,38 +429,10 @@
except (TypeError, LookupError):
pass
- # lookup bookmarks through the name interface
- try:
- self._node = repo.names.singlenode(repo, changeid)
- self._rev = repo.changelog.rev(self._node)
- changectxdeprecwarn(repo)
- return
- except KeyError:
- pass
-
- self._node = scmutil.resolvehexnodeidprefix(repo, changeid)
- if self._node is not None:
- self._rev = repo.changelog.rev(self._node)
- changectxdeprecwarn(repo)
- return
-
# lookup failed
- # check if it might have come from damaged dirstate
- #
- # XXX we could avoid the unfiltered if we had a recognizable
- # exception for filtered changeset access
- if (repo.local()
- and changeid in repo.unfiltered().dirstate.parents()):
- msg = _("working directory has unknown parent '%s'!")
- raise error.Abort(msg % short(changeid))
- try:
- if len(changeid) == 20 and nonascii(changeid):
- changeid = hex(changeid)
- except TypeError:
- pass
except (error.FilteredIndexError, error.FilteredLookupError):
raise error.FilteredRepoLookupError(_("filtered revision '%s'")
- % changeid)
+ % pycompat.bytestr(changeid))
except error.FilteredRepoLookupError:
raise
except IndexError:
--- a/mercurial/copies.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/copies.py Wed Jun 06 13:31:24 2018 -0400
@@ -254,6 +254,11 @@
repo.ui.debug("%s:\n %s\n" % (header % 'local', "\n ".join(u1)))
if u2:
repo.ui.debug("%s:\n %s\n" % (header % 'other', "\n ".join(u2)))
+
+ narrowmatch = repo.narrowmatch()
+ if not narrowmatch.always():
+ u1 = [f for f in u1 if narrowmatch(f)]
+ u2 = [f for f in u2 if narrowmatch(f)]
return u1, u2
def _makegetfctx(ctx):
--- a/mercurial/crecord.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/crecord.py Wed Jun 06 13:31:24 2018 -0400
@@ -65,6 +65,11 @@
# compiled with curses
curses = False
+class fallbackerror(error.Abort):
+ """Error that indicates the client should try to fallback to text mode."""
+ # Inherits from error.Abort so that existing behavior is preserved if the
+ # calling code does not know how to fallback.
+
def checkcurses(ui):
"""Return True if the user wants to use curses
@@ -529,8 +534,8 @@
origsigtstp = signal.getsignal(signal.SIGTSTP)
try:
curses.wrapper(chunkselector.main)
- if chunkselector.initerr is not None:
- raise error.Abort(chunkselector.initerr)
+ if chunkselector.initexc is not None:
+ raise chunkselector.initexc
# ncurses does not restore signal handler for SIGTSTP
finally:
if origsigtstp is not sentinel:
@@ -549,7 +554,7 @@
"""
chunkselector = curseschunkselector(headerlist, ui, operation)
if testfn and os.path.exists(testfn):
- testf = open(testfn)
+ testf = open(testfn, 'rb')
testcommands = [x.rstrip('\n') for x in testf.readlines()]
testf.close()
while True:
@@ -1718,7 +1723,7 @@
self.stdscr = stdscr
# error during initialization, cannot be printed in the curses
# interface, it should be printed by the calling code
- self.initerr = None
+ self.initexc = None
self.yscreensize, self.xscreensize = self.stdscr.getmaxyx()
curses.start_color()
@@ -1751,7 +1756,8 @@
try:
self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
except curses.error:
- self.initerr = _('this diff is too large to be displayed')
+ self.initexc = fallbackerror(
+ _('this diff is too large to be displayed'))
return
# initialize selecteditemendline (initial start-line is 0)
self.selecteditemendline = self.getnumlinesdisplayed(
--- a/mercurial/debugcommands.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/debugcommands.py Wed Jun 06 13:31:24 2018 -0400
@@ -21,7 +21,6 @@
import string
import subprocess
import sys
-import tempfile
import time
from .i18n import _
@@ -630,6 +629,8 @@
opts = pycompat.byteskwargs(opts)
r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
index = r.index
+ start = r.start
+ length = r.length
generaldelta = r.version & revlog.FLAG_GENERALDELTA
withsparseread = getattr(r, '_withsparseread', False)
@@ -677,8 +678,6 @@
comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
chainbase = chain[0]
chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
- start = r.start
- length = r.length
basestart = start(chainbase)
revstart = start(rev)
lineardist = revstart + comp - basestart
@@ -838,8 +837,8 @@
if output:
dest.close()
-@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
-def debugextensions(ui, **opts):
+@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
+def debugextensions(ui, repo, **opts):
'''show information about active extensions'''
opts = pycompat.byteskwargs(opts)
exts = extensions.extensions(ui)
@@ -971,7 +970,7 @@
ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
casesensitive = '(unknown)'
try:
- with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
+ with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
except OSError:
pass
@@ -1143,7 +1142,7 @@
opts = pycompat.byteskwargs(opts)
def writetemp(contents):
- (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
+ (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
f = os.fdopen(fd, r"wb")
f.write(contents)
f.close()
@@ -3018,10 +3017,12 @@
if isinstance(res, wireprotov2peer.commandresponse):
val = list(res.cborobjects())
- ui.status(_('response: %s\n') % stringutil.pprint(val))
+ ui.status(_('response: %s\n') %
+ stringutil.pprint(val, bprefix=True))
else:
- ui.status(_('response: %s\n') % stringutil.pprint(res))
+ ui.status(_('response: %s\n') %
+ stringutil.pprint(res, bprefix=True))
elif action == 'batchbegin':
if batchedcommands is not None:
@@ -3093,7 +3094,8 @@
continue
if res.headers.get('Content-Type') == 'application/mercurial-cbor':
- ui.write(_('cbor> %s\n') % stringutil.pprint(cbor.loads(body)))
+ ui.write(_('cbor> %s\n') %
+ stringutil.pprint(cbor.loads(body), bprefix=True))
elif action == 'close':
peer.close()
--- a/mercurial/dispatch.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/dispatch.py Wed Jun 06 13:31:24 2018 -0400
@@ -83,20 +83,23 @@
def run():
"run the command in sys.argv"
- _initstdio()
+ initstdio()
req = request(pycompat.sysargv[1:])
err = None
try:
- status = (dispatch(req) or 0)
+ status = dispatch(req)
except error.StdioError as e:
err = e
status = -1
+
+ # In all cases we try to flush stdio streams.
if util.safehasattr(req.ui, 'fout'):
try:
req.ui.fout.flush()
except IOError as e:
err = e
status = -1
+
if util.safehasattr(req.ui, 'ferr'):
try:
if err is not None and err.errno != errno.EPIPE:
@@ -112,7 +115,7 @@
sys.exit(status & 255)
if pycompat.ispy3:
- def _initstdio():
+ def initstdio():
pass
def _silencestdio():
@@ -132,7 +135,7 @@
except IOError:
pass
else:
- def _initstdio():
+ def initstdio():
for fp in (sys.stdin, sys.stdout, sys.stderr):
procutil.setbinary(fp)
@@ -172,7 +175,7 @@
return ' '.join(procutil.shellquote(a) for a in args)
def dispatch(req):
- "run the command specified in req.args"
+ """run the command specified in req.args; returns an integer status code"""
if req.ferr:
ferr = req.ferr
elif req.ui:
@@ -205,9 +208,9 @@
msg = _formatargs(req.args)
starttime = util.timer()
- ret = None
+ ret = 1 # default of Python exit code on unhandled exception
try:
- ret = _runcatch(req)
+ ret = _runcatch(req) or 0
except error.ProgrammingError as inst:
req.ui.warn(_('** ProgrammingError: %s\n') % inst)
if inst.hint:
@@ -236,7 +239,7 @@
req.ui.log('uiblocked', 'ui blocked ms',
**pycompat.strkwargs(req.ui._blockedtimes))
req.ui.log("commandfinish", "%s exited %d after %0.2f seconds\n",
- msg, ret or 0, duration)
+ msg, ret & 255, duration)
try:
req._runexithandlers()
except: # exiting, so no re-raises
@@ -285,8 +288,8 @@
req.args[2] != 'serve' or
req.args[3] != '--stdio'):
raise error.Abort(
- _('potentially unsafe serve --stdio invocation: %r') %
- (req.args,))
+ _('potentially unsafe serve --stdio invocation: %s') %
+ (stringutil.pprint(req.args),))
try:
debugger = 'pdb'
@@ -1025,7 +1028,7 @@
'** which supports versions %s of Mercurial.\n'
'** Please disable %s and try your action again.\n'
'** If that fixes the bug please report it to %s\n')
- % (name, testedwith, name, report))
+ % (name, testedwith, name, stringutil.forcebytestr(report)))
else:
bugtracker = ui.config('ui', 'supportcontact')
if bugtracker is None:
--- a/mercurial/encoding.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/encoding.py Wed Jun 06 13:31:24 2018 -0400
@@ -93,6 +93,16 @@
def __hash__(self):
return hash(self._utf8) # avoid collisions in local string space
+class safelocalstr(bytes):
+ """Tagged string denoting it was previously an internal UTF-8 string,
+ and can be converted back to UTF-8 losslessly
+
+ >>> assert safelocalstr(b'\\xc3') == b'\\xc3'
+ >>> assert b'\\xc3' == safelocalstr(b'\\xc3')
+ >>> assert b'\\xc3' in {safelocalstr(b'\\xc3'): 0}
+ >>> assert safelocalstr(b'\\xc3') in {b'\\xc3': 0}
+ """
+
def tolocal(s):
"""
Convert a string from internal UTF-8 to local encoding
@@ -140,7 +150,7 @@
r = u.encode(_sysstr(encoding), u"replace")
if u == r.decode(_sysstr(encoding)):
# r is a safe, non-lossy encoding of s
- return r
+ return safelocalstr(r)
return localstr(s, r)
except UnicodeDecodeError:
# we should only get here if we're looking at an ancient changeset
@@ -149,7 +159,7 @@
r = u.encode(_sysstr(encoding), u"replace")
if u == r.decode(_sysstr(encoding)):
# r is a safe, non-lossy encoding of s
- return r
+ return safelocalstr(r)
return localstr(u.encode('UTF-8'), r)
except UnicodeDecodeError:
u = s.decode("utf-8", "replace") # last ditch
@@ -402,7 +412,7 @@
JSON is problematic for us because it doesn't support non-Unicode
bytes. To deal with this, we take the following approach:
- - localstr objects are converted back to UTF-8
+ - localstr/safelocalstr objects are converted back to UTF-8
- valid UTF-8/ASCII strings are passed as-is
- other strings are converted to UTF-8b surrogate encoding
- apply JSON-specified string escaping
@@ -495,6 +505,7 @@
- local strings that have a cached known UTF-8 encoding (aka
localstr) get sent as UTF-8 so Unicode-oriented clients get the
Unicode data they want
+ - non-lossy local strings (aka safelocalstr) get sent as UTF-8 as well
- because we must preserve UTF-8 bytestring in places such as
filenames, metadata can't be roundtripped without help
@@ -504,11 +515,17 @@
internal surrogate encoding as a UTF-8 string.)
'''
- if not isinstance(s, localstr) and isasciistr(s):
+ if isinstance(s, localstr):
+ # assume that the original UTF-8 sequence would never contain
+ # invalid characters in U+DCxx range
+ return s._utf8
+ elif isinstance(s, safelocalstr):
+ # already verified that s is non-lossy in legacy encoding, which
+ # shouldn't contain characters in U+DCxx range
+ return fromlocal(s)
+ elif isasciistr(s):
return s
if "\xed" not in s:
- if isinstance(s, localstr):
- return s._utf8
try:
s.decode('utf-8', _utf8strict)
return s
--- a/mercurial/extensions.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/extensions.py Wed Jun 06 13:31:24 2018 -0400
@@ -7,6 +7,8 @@
from __future__ import absolute_import
+import ast
+import collections
import functools
import imp
import inspect
@@ -145,9 +147,6 @@
"""Check if extension commands have required attributes"""
for c, e in cmdtable.iteritems():
f = e[0]
- if getattr(f, '_deprecatedregistrar', False):
- ui.deprecwarn("cmdutil.command is deprecated, use "
- "registrar.command to register '%s'" % c, '4.6')
missing = [a for a in _cmdfuncattrs if not util.safehasattr(f, a)]
if not missing:
continue
@@ -541,9 +540,8 @@
fn = getattr(fn, '_origfunc', None)
return result
-def _disabledpaths(strip_init=False):
- '''find paths of disabled extensions. returns a dict of {name: path}
- removes /__init__.py from packages if strip_init is True'''
+def _disabledpaths():
+ '''find paths of disabled extensions. returns a dict of {name: path}'''
import hgext
extpath = os.path.dirname(
os.path.abspath(pycompat.fsencode(hgext.__file__)))
@@ -562,8 +560,6 @@
path = os.path.join(extpath, e, '__init__.py')
if not os.path.exists(path):
continue
- if strip_init:
- path = os.path.dirname(path)
if name in exts or name in _order or name == '__init__':
continue
exts[name] = path
@@ -658,48 +654,82 @@
if name in paths:
return _disabledhelp(paths[name])
+def _walkcommand(node):
+ """Scan @command() decorators in the tree starting at node"""
+ todo = collections.deque([node])
+ while todo:
+ node = todo.popleft()
+ if not isinstance(node, ast.FunctionDef):
+ todo.extend(ast.iter_child_nodes(node))
+ continue
+ for d in node.decorator_list:
+ if not isinstance(d, ast.Call):
+ continue
+ if not isinstance(d.func, ast.Name):
+ continue
+ if d.func.id != r'command':
+ continue
+ yield d
+
+def _disabledcmdtable(path):
+ """Construct a dummy command table without loading the extension module
+
+ This may raise IOError or SyntaxError.
+ """
+ with open(path, 'rb') as src:
+ root = ast.parse(src.read(), path)
+ cmdtable = {}
+ for node in _walkcommand(root):
+ if not node.args:
+ continue
+ a = node.args[0]
+ if isinstance(a, ast.Str):
+ name = pycompat.sysbytes(a.s)
+ elif pycompat.ispy3 and isinstance(a, ast.Bytes):
+ name = a.s
+ else:
+ continue
+ cmdtable[name] = (None, [], b'')
+ return cmdtable
+
+def _finddisabledcmd(ui, cmd, name, path, strict):
+ try:
+ cmdtable = _disabledcmdtable(path)
+ except (IOError, SyntaxError):
+ return
+ try:
+ aliases, entry = cmdutil.findcmd(cmd, cmdtable, strict)
+ except (error.AmbiguousCommand, error.UnknownCommand):
+ return
+ for c in aliases:
+ if c.startswith(cmd):
+ cmd = c
+ break
+ else:
+ cmd = aliases[0]
+ doc = _disabledhelp(path)
+ return (cmd, name, doc)
+
def disabledcmd(ui, cmd, strict=False):
- '''import disabled extensions until cmd is found.
- returns (cmdname, extname, module)'''
+ '''find cmd from disabled extensions without importing.
+ returns (cmdname, extname, doc)'''
- paths = _disabledpaths(strip_init=True)
+ paths = _disabledpaths()
if not paths:
raise error.UnknownCommand(cmd)
- def findcmd(cmd, name, path):
- try:
- mod = loadpath(path, 'hgext.%s' % name)
- except Exception:
- return
- try:
- aliases, entry = cmdutil.findcmd(cmd,
- getattr(mod, 'cmdtable', {}), strict)
- except (error.AmbiguousCommand, error.UnknownCommand):
- return
- except Exception:
- ui.warn(_('warning: error finding commands in %s\n') % path)
- ui.traceback()
- return
- for c in aliases:
- if c.startswith(cmd):
- cmd = c
- break
- else:
- cmd = aliases[0]
- return (cmd, name, mod)
-
ext = None
# first, search for an extension with the same name as the command
path = paths.pop(cmd, None)
if path:
- ext = findcmd(cmd, cmd, path)
+ ext = _finddisabledcmd(ui, cmd, cmd, path, strict=strict)
if not ext:
# otherwise, interrogate each extension until there's a match
for name, path in paths.iteritems():
- ext = findcmd(cmd, name, path)
+ ext = _finddisabledcmd(ui, cmd, name, path, strict=strict)
if ext:
break
- if ext and 'DEPRECATED' not in ext.__doc__:
+ if ext:
return ext
raise error.UnknownCommand(cmd)
@@ -729,7 +759,7 @@
else:
version = ''
if isinstance(version, (list, tuple)):
- version = '.'.join(str(o) for o in version)
+ version = '.'.join(pycompat.bytestr(o) for o in version)
return version
def ismoduleinternal(module):
--- a/mercurial/filemerge.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/filemerge.py Wed Jun 06 13:31:24 2018 -0400
@@ -11,7 +11,6 @@
import os
import re
import shutil
-import tempfile
from .i18n import _
from .node import nullid, short
@@ -114,8 +113,16 @@
def _findtool(ui, tool):
if tool in internals:
return tool
+ cmd = _toolstr(ui, tool, "executable", tool)
+ if cmd.startswith('python:'):
+ return cmd
return findexternaltool(ui, tool)
+def _quotetoolpath(cmd):
+ if cmd.startswith('python:'):
+ return cmd
+ return procutil.shellquote(cmd)
+
def findexternaltool(ui, tool):
for kn in ("regkey", "regkeyalt"):
k = _toolstr(ui, tool, kn)
@@ -165,7 +172,7 @@
return ":prompt", None
else:
if toolpath:
- return (force, procutil.shellquote(toolpath))
+ return (force, _quotetoolpath(toolpath))
else:
# mimic HGMERGE if given tool not found
return (force, force)
@@ -183,7 +190,7 @@
mf = match.match(repo.root, '', [pat])
if mf(path) and check(tool, pat, symlink, False, changedelete):
toolpath = _findtool(ui, tool)
- return (tool, procutil.shellquote(toolpath))
+ return (tool, _quotetoolpath(toolpath))
# then merge tools
tools = {}
@@ -208,7 +215,7 @@
for p, t in tools:
if check(t, None, symlink, binary, changedelete):
toolpath = _findtool(ui, t)
- return (t, procutil.shellquote(toolpath))
+ return (t, _quotetoolpath(toolpath))
# internal merge or prompt as last resort
if symlink or binary or changedelete:
@@ -325,7 +332,7 @@
return filectx
def _premerge(repo, fcd, fco, fca, toolconf, files, labels=None):
- tool, toolpath, binary, symlink = toolconf
+ tool, toolpath, binary, symlink, scriptfn = toolconf
if symlink or fcd.isabsent() or fco.isabsent():
return 1
unused, unused, unused, back = files
@@ -361,7 +368,7 @@
return 1 # continue merging
def _mergecheck(repo, mynode, orig, fcd, fco, fca, toolconf):
- tool, toolpath, binary, symlink = toolconf
+ tool, toolpath, binary, symlink, scriptfn = toolconf
if symlink:
repo.ui.warn(_('warning: internal %s cannot merge symlinks '
'for %s\n') % (tool, fcd.path()))
@@ -430,7 +437,7 @@
Generic driver for _imergelocal and _imergeother
"""
assert localorother is not None
- tool, toolpath, binary, symlink = toolconf
+ tool, toolpath, binary, symlink, scriptfn = toolconf
r = simplemerge.simplemerge(repo.ui, fcd, fca, fco, label=labels,
localorother=localorother)
return True, r
@@ -510,7 +517,7 @@
'external merge tools')
def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
- tool, toolpath, binary, symlink = toolconf
+ tool, toolpath, binary, symlink, scriptfn = toolconf
if fcd.isabsent() or fco.isabsent():
repo.ui.warn(_('warning: %s cannot merge change/delete conflict '
'for %s\n') % (tool, fcd.path()))
@@ -551,12 +558,36 @@
args = util.interpolate(
br'\$', replace, args,
lambda s: procutil.shellquote(util.localpath(s)))
- cmd = toolpath + ' ' + args
if _toolbool(ui, tool, "gui"):
repo.ui.status(_('running merge tool %s for file %s\n') %
(tool, fcd.path()))
- repo.ui.debug('launching merge tool: %s\n' % cmd)
- r = ui.system(cmd, cwd=repo.root, environ=env, blockedtag='mergetool')
+ if scriptfn is None:
+ cmd = toolpath + ' ' + args
+ repo.ui.debug('launching merge tool: %s\n' % cmd)
+ r = ui.system(cmd, cwd=repo.root, environ=env,
+ blockedtag='mergetool')
+ else:
+ repo.ui.debug('launching python merge script: %s:%s\n' %
+ (toolpath, scriptfn))
+ r = 0
+ try:
+ # avoid cycle cmdutil->merge->filemerge->extensions->cmdutil
+ from . import extensions
+ mod = extensions.loadpath(toolpath, 'hgmerge.%s' % tool)
+ except Exception:
+ raise error.Abort(_("loading python merge script failed: %s") %
+ toolpath)
+ mergefn = getattr(mod, scriptfn, None)
+ if mergefn is None:
+ raise error.Abort(_("%s does not have function: %s") %
+ (toolpath, scriptfn))
+ argslist = procutil.shellsplit(args)
+ # avoid cycle cmdutil->merge->filemerge->hook->extensions->cmdutil
+ from . import hook
+ ret, raised = hook.pythonhook(ui, repo, "merge", toolpath,
+ mergefn, {'args': argslist}, True)
+ if raised:
+ r = 1
repo.ui.debug('merge tool returned: %d\n' % r)
return True, r, False
@@ -681,7 +712,7 @@
tmproot = None
tmprootprefix = repo.ui.config('experimental', 'mergetempdirprefix')
if tmprootprefix:
- tmproot = tempfile.mkdtemp(prefix=tmprootprefix)
+ tmproot = pycompat.mkdtemp(prefix=tmprootprefix)
def maketempfrompath(prefix, path):
fullbase, ext = os.path.splitext(path)
@@ -692,7 +723,7 @@
name += ext
f = open(name, r"wb")
else:
- fd, name = tempfile.mkstemp(prefix=pre + '.', suffix=ext)
+ fd, name = pycompat.mkstemp(prefix=pre + '.', suffix=ext)
f = os.fdopen(fd, r"wb")
return f, name
@@ -751,9 +782,24 @@
symlink = 'l' in fcd.flags() + fco.flags()
changedelete = fcd.isabsent() or fco.isabsent()
tool, toolpath = _picktool(repo, ui, fd, binary, symlink, changedelete)
+ scriptfn = None
if tool in internals and tool.startswith('internal:'):
# normalize to new-style names (':merge' etc)
tool = tool[len('internal'):]
+ if toolpath and toolpath.startswith('python:'):
+ invalidsyntax = False
+ if toolpath.count(':') >= 2:
+ script, scriptfn = toolpath[7:].rsplit(':', 1)
+ if not scriptfn:
+ invalidsyntax = True
+ # missing :callable can lead to splitting on windows drive letter
+ if '\\' in scriptfn or '/' in scriptfn:
+ invalidsyntax = True
+ else:
+ invalidsyntax = True
+ if invalidsyntax:
+ raise error.Abort(_("invalid 'python:' syntax: %s") % toolpath)
+ toolpath = script
ui.debug("picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n"
% (tool, fd, pycompat.bytestr(binary), pycompat.bytestr(symlink),
pycompat.bytestr(changedelete)))
@@ -774,7 +820,7 @@
precheck = None
isexternal = True
- toolconf = tool, toolpath, binary, symlink
+ toolconf = tool, toolpath, binary, symlink, scriptfn
if mergetype == nomerge:
r, deleted = func(repo, mynode, orig, fcd, fco, fca, toolconf, labels)
--- a/mercurial/fileset.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/fileset.py Wed Jun 06 13:31:24 2018 -0400
@@ -354,7 +354,8 @@
# i18n: "grep" is a keyword
r = re.compile(getstring(x, _("grep requires a pattern")))
except re.error as e:
- raise error.ParseError(_('invalid match pattern: %s') % e)
+ raise error.ParseError(_('invalid match pattern: %s') %
+ stringutil.forcebytestr(e))
return [f for f in mctx.existing() if r.search(mctx.ctx[f].data())]
def _sizetomax(s):
--- a/mercurial/formatter.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/formatter.py Wed Jun 06 13:31:24 2018 -0400
@@ -107,7 +107,6 @@
from __future__ import absolute_import, print_function
-import collections
import contextlib
import itertools
import os
@@ -117,6 +116,9 @@
hex,
short,
)
+from .thirdparty import (
+ attr,
+)
from . import (
error,
@@ -413,8 +415,11 @@
baseformatter.end(self)
self._renderitem('docfooter', {})
-templatespec = collections.namedtuple(r'templatespec',
- r'ref tmpl mapfile')
+@attr.s(frozen=True)
+class templatespec(object):
+ ref = attr.ib()
+ tmpl = attr.ib()
+ mapfile = attr.ib()
def lookuptemplate(ui, topic, tmpl):
"""Find the template matching the given -T/--template spec 'tmpl'
--- a/mercurial/graphmod.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/graphmod.py Wed Jun 06 13:31:24 2018 -0400
@@ -341,6 +341,22 @@
'graphshorten': False,
}
+def outputgraph(ui, graph):
+ """outputs an ASCII graph of a DAG
+
+ this is a helper function for 'ascii' below.
+
+ takes the following arguments:
+
+ - ui to write to
+ - graph data: list of { graph nodes/edges, text }
+
+ this function can be monkey-patched by extensions to alter graph display
+ without needing to mimic all of the edge-fixup logic in ascii()
+ """
+ for (ln, logstr) in graph:
+ ui.write((ln + logstr).rstrip() + "\n")
+
def ascii(ui, state, type, char, text, coldata):
"""prints an ASCII graph of the DAG
@@ -469,9 +485,8 @@
# print lines
indentation_level = max(ncols, ncols + coldiff)
- for (line, logstr) in zip(lines, text):
- ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
- ui.write(ln.rstrip() + '\n')
+ lines = ["%-*s " % (2 * indentation_level, "".join(line)) for line in lines]
+ outputgraph(ui, zip(lines, text))
# ... and start over
state['lastcoldiff'] = coldiff
--- a/mercurial/help.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/help.py Wed Jun 06 13:31:24 2018 -0400
@@ -232,6 +232,7 @@
(['bundlespec'], _("Bundle File Formats"), loaddoc('bundlespec')),
(['color'], _("Colorizing Outputs"), loaddoc('color')),
(["config", "hgrc"], _("Configuration Files"), loaddoc('config')),
+ (['deprecated'], _("Deprecated Features"), loaddoc('deprecated')),
(["dates"], _("Date Formats"), loaddoc('dates')),
(["flags"], _("Command-line flags"), loaddoc('flags')),
(["patterns"], _("File Name Patterns"), loaddoc('patterns')),
@@ -574,9 +575,9 @@
return rst
def helpextcmd(name, subtopic=None):
- cmd, ext, mod = extensions.disabledcmd(ui, name,
+ cmd, ext, doc = extensions.disabledcmd(ui, name,
ui.configbool('ui', 'strict'))
- doc = gettext(pycompat.getdoc(mod)).splitlines()[0]
+ doc = doc.splitlines()[0]
rst = listexts(_("'%s' is provided by the following "
"extension:") % cmd, {ext: doc}, indent=4,
--- a/mercurial/help/config.txt Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/help/config.txt Wed Jun 06 13:31:24 2018 -0400
@@ -442,6 +442,10 @@
Make paths in :hg:`status` output relative to the current directory.
(default: False)
+``status.terse``
+ Default value for the --terse flag, which condenses status output.
+ (default: empty)
+
``update.check``
Determines what level of checking :hg:`update` will perform before moving
to a destination revision. Valid values are ``abort``, ``none``,
@@ -2325,7 +2329,7 @@
``address``
Interface address to bind to. (default: all)
-``allow_archive``
+``allow-archive``
List of archive format (bz2, gz, zip) allowed for downloading.
(default: empty)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/help/deprecated.txt Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,30 @@
+Mercurial evolves over time; some features, options, and commands may be
+replaced by better and more secure alternatives. This topic will help you
+migrate your existing usage and/or configuration to newer features.
+
+Commands
+========
+
+The following commands are still available but their use is not recommended:
+
+``locate``
+
+This command has been replaced by `hg files`.
+
+``parents``
+
+This command can be replaced by `hg summary` or `hg log` with appropriate
+revsets. See `hg help revsets` for more information.
+
+``tip``
+
+The recommended alternative is `hg heads`.
+
+Options
+=======
+
+``web.allowpull``
+ Renamed to `allow-pull`.
+
+``web.allow_push``
+ Renamed to `allow-push`.
--- a/mercurial/hgweb/__init__.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/hgweb/__init__.py Wed Jun 06 13:31:24 2018 -0400
@@ -57,7 +57,9 @@
procutil.setsignalhandler()
self.httpd = server.create_server(self.ui, self.app)
- if self.opts['port'] and not self.ui.verbose:
+ if (self.opts['port'] and
+ not self.ui.verbose and
+ not self.opts['print_url']):
return
if self.httpd.prefix:
@@ -78,13 +80,18 @@
fqaddr = self.httpd.fqaddr
if r':' in fqaddr:
fqaddr = r'[%s]' % fqaddr
- if self.opts['port']:
- write = self.ui.status
+
+ url = 'http://%s%s/%s' % (
+ pycompat.sysbytes(fqaddr), pycompat.sysbytes(port), prefix)
+ if self.opts['print_url']:
+ self.ui.write('%s\n' % url)
else:
- write = self.ui.write
- write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
- (pycompat.sysbytes(fqaddr), pycompat.sysbytes(port),
- prefix, pycompat.sysbytes(bindaddr), self.httpd.port))
+ if self.opts['port']:
+ write = self.ui.status
+ else:
+ write = self.ui.write
+ write(_('listening at %s (bound to %s:%d)\n') %
+ (url, pycompat.sysbytes(bindaddr), self.httpd.port))
self.ui.flush() # avoid buffering of status message
def run(self):
--- a/mercurial/hgweb/webcommands.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/hgweb/webcommands.py Wed Jun 06 13:31:24 2018 -0400
@@ -13,7 +13,7 @@
import re
from ..i18n import _
-from ..node import hex, nullid, short
+from ..node import hex, short
from .common import (
ErrorResponse,
@@ -149,7 +149,7 @@
mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
text = '(binary:%s)' % mt
- def lines():
+ def lines(context):
for lineno, t in enumerate(text.splitlines(True)):
yield {"line": t,
"lineid": "l%d" % (lineno + 1),
@@ -160,7 +160,7 @@
'filerevision',
file=f,
path=webutil.up(f),
- text=lines(),
+ text=templateutil.mappinggenerator(lines),
symrev=webutil.symrevorshortnode(web.req, fctx),
rename=webutil.renamelink(fctx),
permissions=fctx.manifest().flags(f),
@@ -295,9 +295,8 @@
for ctx in searchfunc[0](funcarg):
count += 1
n = ctx.node()
- showtags = webutil.showtag(web.repo, web.tmpl, 'changelogtag', n)
- files = webutil.listfilediffs(web.tmpl, ctx.files(), n,
- web.maxfiles)
+ showtags = webutil.showtag(web.repo, 'changelogtag', n)
+ files = webutil.listfilediffs(ctx.files(), n, web.maxfiles)
lm = webutil.commonentry(web.repo, ctx)
lm.update({
@@ -399,14 +398,8 @@
revs = []
if pos != -1:
revs = web.repo.changelog.revs(pos, 0)
- curcount = 0
- for rev in revs:
- curcount += 1
- if curcount > revcount + 1:
- break
- entry = webutil.changelistentry(web, web.repo[rev])
- entry['parity'] = next(parity)
+ for entry in webutil.changelistentries(web, revs, revcount, parity):
yield entry
if shortlog:
@@ -448,9 +441,9 @@
rev=pos,
symrev=symrev,
changesets=count,
- entries=entries,
- latestentry=latestentry,
- nextentry=nextentry,
+ entries=templateutil.mappinglist(entries),
+ latestentry=templateutil.mappinglist(latestentry),
+ nextentry=templateutil.mappinglist(nextentry),
archives=web.archivelist('tip'),
revcount=revcount,
morevars=morevars,
@@ -563,7 +556,7 @@
if mf and not files and not dirs:
raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path)
- def filelist(**map):
+ def filelist(context):
for f in sorted(files):
full = files[f]
@@ -575,7 +568,7 @@
"size": fctx.size(),
"permissions": mf.flags(full)}
- def dirlist(**map):
+ def dirlist(context):
for d in sorted(dirs):
emptydirs = []
@@ -598,8 +591,8 @@
path=abspath,
up=webutil.up(abspath),
upparity=next(parity),
- fentries=filelist,
- dentries=dirlist,
+ fentries=templateutil.mappinggenerator(filelist),
+ dentries=templateutil.mappinggenerator(dirlist),
archives=web.archivelist(hex(node)),
**pycompat.strkwargs(webutil.commonentry(web.repo, ctx)))
@@ -618,7 +611,7 @@
i = list(reversed(web.repo.tagslist()))
parity = paritygen(web.stripecount)
- def entries(notip, latestonly, **map):
+ def entries(context, notip, latestonly):
t = i
if notip:
t = [(k, n) for k, n in i if k != "tip"]
@@ -633,9 +626,10 @@
return web.sendtemplate(
'tags',
node=hex(web.repo.changelog.tip()),
- entries=lambda **x: entries(False, False, **x),
- entriesnotip=lambda **x: entries(True, False, **x),
- latestentry=lambda **x: entries(True, True, **x))
+ entries=templateutil.mappinggenerator(entries, args=(False, False)),
+ entriesnotip=templateutil.mappinggenerator(entries,
+ args=(True, False)),
+ latestentry=templateutil.mappinggenerator(entries, args=(True, True)))
@webcommand('bookmarks')
def bookmarks(web):
@@ -654,7 +648,7 @@
i = sorted(i, key=sortkey, reverse=True)
parity = paritygen(web.stripecount)
- def entries(latestonly, **map):
+ def entries(context, latestonly):
t = i
if latestonly:
t = i[:1]
@@ -668,13 +662,14 @@
latestrev = i[0][1]
else:
latestrev = -1
+ lastdate = web.repo[latestrev].date()
return web.sendtemplate(
'bookmarks',
node=hex(web.repo.changelog.tip()),
- lastchange=[{'date': web.repo[latestrev].date()}],
- entries=lambda **x: entries(latestonly=False, **x),
- latestentry=lambda **x: entries(latestonly=True, **x))
+ lastchange=templateutil.mappinglist([{'date': lastdate}]),
+ entries=templateutil.mappinggenerator(entries, args=(False,)),
+ latestentry=templateutil.mappinggenerator(entries, args=(True,)))
@webcommand('branches')
def branches(web):
@@ -732,7 +727,7 @@
'date': web.repo[n].date(),
}
- def bookmarks(**map):
+ def bookmarks(context):
parity = paritygen(web.stripecount)
marks = [b for b in web.repo._bookmarks.items() if b[1] in web.repo]
sortkey = lambda b: (web.repo[b[1]].rev(), b[0])
@@ -774,7 +769,7 @@
owner=get_contact(web.config) or 'unknown',
lastchange=tip.date(),
tags=templateutil.mappinggenerator(tagentries, name='tagentry'),
- bookmarks=bookmarks,
+ bookmarks=templateutil.mappinggenerator(bookmarks),
branches=webutil.branchentries(web.repo, web.stripecount, 10),
shortlog=templateutil.mappinggenerator(changelist,
name='shortlogentry'),
@@ -819,7 +814,7 @@
rename = webutil.renamelink(fctx)
ctx = fctx
else:
- rename = []
+ rename = templateutil.mappinglist([])
ctx = ctx
return web.sendtemplate(
@@ -887,12 +882,12 @@
pfctx = ctx.parents()[0][path]
leftlines = filelines(pfctx)
- comparison = webutil.compare(web.tmpl, context, leftlines, rightlines)
+ comparison = webutil.compare(context, leftlines, rightlines)
if fctx is not None:
rename = webutil.renamelink(fctx)
ctx = fctx
else:
- rename = []
+ rename = templateutil.mappinglist([])
ctx = ctx
return web.sendtemplate(
@@ -934,7 +929,7 @@
# TODO there are still redundant operations within basefilectx.parents()
# and from the fctx.annotate() call itself that could be cached.
parentscache = {}
- def parents(f):
+ def parents(context, f):
rev = f.rev()
if rev not in parentscache:
parentscache[rev] = []
@@ -948,7 +943,7 @@
for p in parentscache[rev]:
yield p
- def annotate(**map):
+ def annotate(context):
if fctx.isbinary():
mt = (mimetypes.guess_type(fctx.path())[0]
or 'application/octet-stream')
@@ -972,7 +967,7 @@
"node": f.hex(),
"rev": rev,
"author": f.user(),
- "parents": parents(f),
+ "parents": templateutil.mappinggenerator(parents, args=(f,)),
"desc": f.description(),
"extra": f.extra(),
"file": f.path(),
@@ -991,13 +986,13 @@
return web.sendtemplate(
'fileannotate',
file=f,
- annotate=annotate,
+ annotate=templateutil.mappinggenerator(annotate),
path=webutil.up(f),
symrev=webutil.symrevorshortnode(web.req, fctx),
rename=webutil.renamelink(fctx),
permissions=fctx.manifest().flags(f),
ishead=int(ishead),
- diffopts=diffopts,
+ diffopts=templateutil.hybriddict(diffopts),
**pycompat.strkwargs(webutil.commonentry(web.repo, fctx)))
@webcommand('filelog')
@@ -1095,13 +1090,16 @@
diffs = diff(c, linerange=lr)
# follow renames accross filtered (not in range) revisions
path = c.path()
- entries.append(dict(
- parity=next(parity),
- filerev=c.rev(),
- file=path,
- diff=diffs,
- linerange=webutil.formatlinerange(*lr),
- **pycompat.strkwargs(webutil.commonentry(repo, c))))
+ lm = webutil.commonentry(repo, c)
+ lm.update({
+ 'parity': next(parity),
+ 'filerev': c.rev(),
+ 'file': path,
+ 'diff': diffs,
+ 'linerange': webutil.formatlinerange(*lr),
+ 'rename': templateutil.mappinglist([]),
+ })
+ entries.append(lm)
if i == revcount:
break
lessvars['linerange'] = webutil.formatlinerange(*lrange)
@@ -1112,13 +1110,15 @@
diffs = None
if patch:
diffs = diff(iterfctx)
- entries.append(dict(
- parity=next(parity),
- filerev=i,
- file=f,
- diff=diffs,
- rename=webutil.renamelink(iterfctx),
- **pycompat.strkwargs(webutil.commonentry(repo, iterfctx))))
+ lm = webutil.commonentry(repo, iterfctx)
+ lm.update({
+ 'parity': next(parity),
+ 'filerev': i,
+ 'file': f,
+ 'diff': diffs,
+ 'rename': webutil.renamelink(iterfctx),
+ })
+ entries.append(lm)
entries.reverse()
revnav = webutil.filerevnav(web.repo, fctx.path())
nav = revnav.gen(end - 1, revcount, count)
@@ -1130,10 +1130,10 @@
file=f,
nav=nav,
symrev=webutil.symrevorshortnode(web.req, fctx),
- entries=entries,
+ entries=templateutil.mappinglist(entries),
descend=descend,
patch=patch,
- latestentry=latestentry,
+ latestentry=templateutil.mappinglist(latestentry),
linerange=linerange,
revcount=revcount,
morevars=morevars,
@@ -1162,7 +1162,7 @@
"""
type_ = web.req.qsparams.get('type')
- allowed = web.configlist("web", "allow_archive")
+ allowed = web.configlist("web", "allow-archive")
key = web.req.qsparams['node']
if type_ not in webutil.archivespecs:
@@ -1314,24 +1314,6 @@
tree = list(item for item in graphmod.colored(dag, web.repo)
if item[1] == graphmod.CHANGESET)
- def nodecurrent(ctx):
- wpnodes = web.repo.dirstate.parents()
- if wpnodes[1] == nullid:
- wpnodes = wpnodes[:1]
- if ctx.node() in wpnodes:
- return '@'
- return ''
-
- def nodesymbol(ctx):
- if ctx.obsolete():
- return 'x'
- elif ctx.isunstable():
- return '*'
- elif ctx.closesbranch():
- return '_'
- else:
- return 'o'
-
def fulltree():
pos = web.repo[graphtop].rev()
tree = []
@@ -1342,14 +1324,14 @@
if item[1] == graphmod.CHANGESET)
return tree
- def jsdata():
- return [{'node': pycompat.bytestr(ctx),
- 'graphnode': nodecurrent(ctx) + nodesymbol(ctx),
- 'vertex': vtx,
- 'edges': edges}
- for (id, type, ctx, vtx, edges) in fulltree()]
+ def jsdata(context):
+ for (id, type, ctx, vtx, edges) in fulltree():
+ yield {'node': pycompat.bytestr(ctx),
+ 'graphnode': webutil.getgraphnode(web.repo, ctx),
+ 'vertex': vtx,
+ 'edges': edges}
- def nodes():
+ def nodes(context):
parity = paritygen(web.stripecount)
for row, (id, type, ctx, vtx, edges) in enumerate(tree):
entry = webutil.commonentry(web.repo, ctx)
@@ -1363,7 +1345,7 @@
entry.update({'col': vtx[0],
'color': (vtx[1] - 1) % 6 + 1,
'parity': next(parity),
- 'edges': edgedata,
+ 'edges': templateutil.mappinglist(edgedata),
'row': row,
'nextrow': row + 1})
@@ -1384,9 +1366,9 @@
rows=rows,
bg_height=bg_height,
changesets=count,
- nextentry=nextentry,
- jsdata=lambda **x: jsdata(),
- nodes=lambda **x: nodes(),
+ nextentry=templateutil.mappinglist(nextentry),
+ jsdata=templateutil.mappinggenerator(jsdata),
+ nodes=templateutil.mappinggenerator(nodes),
node=ctx.hex(),
changenav=changenav)
@@ -1417,7 +1399,7 @@
topicname = web.req.qsparams.get('node')
if not topicname:
- def topics(**map):
+ def topics(context):
for entries, summary, _doc in helpmod.helptable:
yield {'topic': entries[0], 'summary': summary}
@@ -1436,19 +1418,19 @@
early.sort()
other.sort()
- def earlycommands(**map):
+ def earlycommands(context):
for c, doc in early:
yield {'topic': c, 'summary': doc}
- def othercommands(**map):
+ def othercommands(context):
for c, doc in other:
yield {'topic': c, 'summary': doc}
return web.sendtemplate(
'helptopics',
- topics=topics,
- earlycommands=earlycommands,
- othercommands=othercommands,
+ topics=templateutil.mappinggenerator(topics),
+ earlycommands=templateutil.mappinggenerator(earlycommands),
+ othercommands=templateutil.mappinggenerator(othercommands),
title='Index')
# Render an index of sub-topics.
@@ -1463,7 +1445,7 @@
return web.sendtemplate(
'helptopics',
- topics=topics,
+ topics=templateutil.mappinglist(topics),
title=topicname,
subindex=True)
--- a/mercurial/hgweb/webutil.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/hgweb/webutil.py Wed Jun 06 13:31:24 2018 -0400
@@ -51,7 +51,7 @@
))
def archivelist(ui, nodeid, url=None):
- allowed = ui.configlist('web', 'allow_archive', untrusted=True)
+ allowed = ui.configlist('web', 'allow-archive', untrusted=True)
archives = []
for typ, spec in archivespecs.iteritems():
@@ -234,14 +234,14 @@
def renamelink(fctx):
r = fctx.renamed()
if r:
- return [{'file': r[0], 'node': hex(r[1])}]
- return []
+ return templateutil.mappinglist([{'file': r[0], 'node': hex(r[1])}])
+ return templateutil.mappinglist([])
def nodetagsdict(repo, node):
- return [{"name": i} for i in repo.nodetags(node)]
+ return templateutil.hybridlist(repo.nodetags(node), name='name')
def nodebookmarksdict(repo, node):
- return [{"name": i} for i in repo.nodebookmarks(node)]
+ return templateutil.hybridlist(repo.nodebookmarks(node), name='name')
def nodebranchdict(repo, ctx):
branches = []
@@ -253,8 +253,8 @@
except error.RepoLookupError:
branchnode = None
if branchnode == ctx.node():
- branches.append({"name": branch})
- return branches
+ branches.append(branch)
+ return templateutil.hybridlist(branches, name='name')
def nodeinbranch(repo, ctx):
branches = []
@@ -264,29 +264,27 @@
except error.RepoLookupError:
branchnode = None
if branch != 'default' and branchnode != ctx.node():
- branches.append({"name": branch})
- return branches
+ branches.append(branch)
+ return templateutil.hybridlist(branches, name='name')
def nodebranchnodefault(ctx):
branches = []
branch = ctx.branch()
if branch != 'default':
- branches.append({"name": branch})
- return branches
+ branches.append(branch)
+ return templateutil.hybridlist(branches, name='name')
+
+def _nodenamesgen(context, f, node, name):
+ for t in f(node):
+ yield {name: t}
-def showtag(repo, tmpl, t1, node=nullid, **args):
- args = pycompat.byteskwargs(args)
- for t in repo.nodetags(node):
- lm = args.copy()
- lm['tag'] = t
- yield tmpl.generate(t1, lm)
+def showtag(repo, t1, node=nullid):
+ args = (repo.nodetags, node, 'tag')
+ return templateutil.mappinggenerator(_nodenamesgen, args=args, name=t1)
-def showbookmark(repo, tmpl, t1, node=nullid, **args):
- args = pycompat.byteskwargs(args)
- for t in repo.nodebookmarks(node):
- lm = args.copy()
- lm['bookmark'] = t
- yield tmpl.generate(t1, lm)
+def showbookmark(repo, t1, node=nullid):
+ args = (repo.nodebookmarks, node, 'bookmark')
+ return templateutil.mappinggenerator(_nodenamesgen, args=args, name=t1)
def branchentries(repo, stripecount, limit=0):
tips = []
@@ -294,7 +292,7 @@
parity = paritygen(stripecount)
sortkey = lambda item: (not item[1], item[0].rev())
- def entries(**map):
+ def entries(context):
count = 0
if not tips:
for tag, hs, tip, closed in repo.branchmap().iterbranches():
@@ -317,7 +315,7 @@
'date': ctx.date()
}
- return entries
+ return templateutil.mappinggenerator(entries)
def cleanpath(repo, path):
path = path.lstrip('/')
@@ -380,7 +378,7 @@
def formatlinerange(fromline, toline):
return '%d:%d' % (fromline + 1, toline)
-def succsandmarkers(context, mapping):
+def _succsandmarkersgen(context, mapping):
repo = context.resource(mapping, 'repo')
itemmappings = templatekw.showsuccsandmarkers(context, mapping)
for item in itemmappings.tovalue(context, mapping):
@@ -388,10 +386,13 @@
for successor in item['successors'])
yield item
+def succsandmarkers(context, mapping):
+ return templateutil.mappinggenerator(_succsandmarkersgen, args=(mapping,))
+
# teach templater succsandmarkers is switched to (context, mapping) API
succsandmarkers._requires = {'repo', 'ctx'}
-def whyunstable(context, mapping):
+def _whyunstablegen(context, mapping):
repo = context.resource(mapping, 'repo')
ctx = context.resource(mapping, 'ctx')
@@ -401,6 +402,9 @@
entry['divergentnodes'] = _siblings(entry['divergentnodes'])
yield entry
+def whyunstable(context, mapping):
+ return templateutil.mappinggenerator(_whyunstablegen, args=(mapping,))
+
whyunstable._requires = {'repo', 'ctx'}
def commonentry(repo, ctx):
@@ -419,7 +423,8 @@
'phase': ctx.phasestr(),
'obsolete': ctx.obsolete(),
'succsandmarkers': succsandmarkers,
- 'instabilities': [{"instability": i} for i in ctx.instabilities()],
+ 'instabilities': templateutil.hybridlist(ctx.instabilities(),
+ name='instability'),
'whyunstable': whyunstable,
'branch': nodebranchnodefault(ctx),
'inbranch': nodeinbranch(repo, ctx),
@@ -439,8 +444,8 @@
repo = web.repo
rev = ctx.rev()
n = ctx.node()
- showtags = showtag(repo, web.tmpl, 'changelogtag', n)
- files = listfilediffs(web.tmpl, ctx.files(), n, web.maxfiles)
+ showtags = showtag(repo, 'changelogtag', n)
+ files = listfilediffs(ctx.files(), n, web.maxfiles)
entry = commonentry(repo, ctx)
entry.update(
@@ -452,30 +457,45 @@
)
return entry
+def changelistentries(web, revs, maxcount, parityfn):
+ """Emit up to N records for an iterable of revisions."""
+ repo = web.repo
+
+ count = 0
+ for rev in revs:
+ if count >= maxcount:
+ break
+
+ count += 1
+
+ entry = changelistentry(web, repo[rev])
+ entry['parity'] = next(parityfn)
+
+ yield entry
+
def symrevorshortnode(req, ctx):
if 'node' in req.qsparams:
return templatefilters.revescape(req.qsparams['node'])
else:
return short(ctx.node())
-def changesetentry(web, ctx):
- '''Obtain a dictionary to be used to render the "changeset" template.'''
-
- showtags = showtag(web.repo, web.tmpl, 'changesettag', ctx.node())
- showbookmarks = showbookmark(web.repo, web.tmpl, 'changesetbookmark',
- ctx.node())
- showbranch = nodebranchnodefault(ctx)
-
- files = []
- parity = paritygen(web.stripecount)
+def _listfilesgen(context, ctx, stripecount):
+ parity = paritygen(stripecount)
for blockno, f in enumerate(ctx.files()):
template = 'filenodelink' if f in ctx else 'filenolink'
- files.append(web.tmpl.generate(template, {
+ yield context.process(template, {
'node': ctx.hex(),
'file': f,
'blockno': blockno + 1,
'parity': next(parity),
- }))
+ })
+
+def changesetentry(web, ctx):
+ '''Obtain a dictionary to be used to render the "changeset" template.'''
+
+ showtags = showtag(web.repo, 'changesettag', ctx.node())
+ showbookmarks = showbookmark(web.repo, 'changesetbookmark', ctx.node())
+ showbranch = nodebranchnodefault(ctx)
basectx = basechangectx(web.repo, web.req)
if basectx is None:
@@ -489,7 +509,7 @@
parity = paritygen(web.stripecount)
diffstatsgen = diffstatgen(ctx, basectx)
- diffstats = diffstat(web.tmpl, ctx, diffstatsgen, parity)
+ diffstats = diffstat(ctx, diffstatsgen, parity)
return dict(
diff=diff,
@@ -498,40 +518,43 @@
changesettag=showtags,
changesetbookmark=showbookmarks,
changesetbranch=showbranch,
- files=files,
+ files=templateutil.mappedgenerator(_listfilesgen,
+ args=(ctx, web.stripecount)),
diffsummary=lambda **x: diffsummary(diffstatsgen),
diffstat=diffstats,
archives=web.archivelist(ctx.hex()),
**pycompat.strkwargs(commonentry(web.repo, ctx)))
-def listfilediffs(tmpl, files, node, max):
+def _listfilediffsgen(context, files, node, max):
for f in files[:max]:
- yield tmpl.generate('filedifflink', {'node': hex(node), 'file': f})
+ yield context.process('filedifflink', {'node': hex(node), 'file': f})
if len(files) > max:
- yield tmpl.generate('fileellipses', {})
+ yield context.process('fileellipses', {})
-def diffs(web, ctx, basectx, files, style, linerange=None,
- lineidprefix=''):
+def listfilediffs(files, node, max):
+ return templateutil.mappedgenerator(_listfilediffsgen,
+ args=(files, node, max))
- def prettyprintlines(lines, blockno):
- for lineno, l in enumerate(lines, 1):
- difflineno = "%d.%d" % (blockno, lineno)
- if l.startswith('+'):
- ltype = "difflineplus"
- elif l.startswith('-'):
- ltype = "difflineminus"
- elif l.startswith('@'):
- ltype = "difflineat"
- else:
- ltype = "diffline"
- yield web.tmpl.generate(ltype, {
- 'line': l,
- 'lineno': lineno,
- 'lineid': lineidprefix + "l%s" % difflineno,
- 'linenumber': "% 8s" % difflineno,
- })
+def _prettyprintdifflines(context, lines, blockno, lineidprefix):
+ for lineno, l in enumerate(lines, 1):
+ difflineno = "%d.%d" % (blockno, lineno)
+ if l.startswith('+'):
+ ltype = "difflineplus"
+ elif l.startswith('-'):
+ ltype = "difflineminus"
+ elif l.startswith('@'):
+ ltype = "difflineat"
+ else:
+ ltype = "diffline"
+ yield context.process(ltype, {
+ 'line': l,
+ 'lineno': lineno,
+ 'lineid': lineidprefix + "l%s" % difflineno,
+ 'linenumber': "% 8s" % difflineno,
+ })
- repo = web.repo
+def _diffsgen(context, repo, ctx, basectx, files, style, stripecount,
+ linerange, lineidprefix):
if files:
m = match.exact(repo.root, repo.getcwd(), files)
else:
@@ -540,7 +563,7 @@
diffopts = patch.diffopts(repo.ui, untrusted=True)
node1 = basectx.node()
node2 = ctx.node()
- parity = paritygen(web.stripecount)
+ parity = paritygen(stripecount)
diffhunks = patch.diffhunks(repo, node1, node2, m, opts=diffopts)
for blockno, (fctx1, fctx2, header, hunks) in enumerate(diffhunks, 1):
@@ -554,64 +577,82 @@
continue
lines.extend(hunklines)
if lines:
- yield web.tmpl.generate('diffblock', {
+ l = templateutil.mappedgenerator(_prettyprintdifflines,
+ args=(lines, blockno,
+ lineidprefix))
+ yield {
'parity': next(parity),
'blockno': blockno,
- 'lines': prettyprintlines(lines, blockno),
- })
+ 'lines': l,
+ }
-def compare(tmpl, context, leftlines, rightlines):
- '''Generator function that provides side-by-side comparison data.'''
+def diffs(web, ctx, basectx, files, style, linerange=None, lineidprefix=''):
+ args = (web.repo, ctx, basectx, files, style, web.stripecount,
+ linerange, lineidprefix)
+ return templateutil.mappinggenerator(_diffsgen, args=args, name='diffblock')
- def compline(type, leftlineno, leftline, rightlineno, rightline):
- lineid = leftlineno and ("l%d" % leftlineno) or ''
- lineid += rightlineno and ("r%d" % rightlineno) or ''
- llno = '%d' % leftlineno if leftlineno else ''
- rlno = '%d' % rightlineno if rightlineno else ''
- return tmpl.generate('comparisonline', {
- 'type': type,
- 'lineid': lineid,
- 'leftlineno': leftlineno,
- 'leftlinenumber': "% 6s" % llno,
- 'leftline': leftline or '',
- 'rightlineno': rightlineno,
- 'rightlinenumber': "% 6s" % rlno,
- 'rightline': rightline or '',
- })
+def _compline(type, leftlineno, leftline, rightlineno, rightline):
+ lineid = leftlineno and ("l%d" % leftlineno) or ''
+ lineid += rightlineno and ("r%d" % rightlineno) or ''
+ llno = '%d' % leftlineno if leftlineno else ''
+ rlno = '%d' % rightlineno if rightlineno else ''
+ return {
+ 'type': type,
+ 'lineid': lineid,
+ 'leftlineno': leftlineno,
+ 'leftlinenumber': "% 6s" % llno,
+ 'leftline': leftline or '',
+ 'rightlineno': rightlineno,
+ 'rightlinenumber': "% 6s" % rlno,
+ 'rightline': rightline or '',
+ }
- def getblock(opcodes):
- for type, llo, lhi, rlo, rhi in opcodes:
- len1 = lhi - llo
- len2 = rhi - rlo
- count = min(len1, len2)
- for i in xrange(count):
- yield compline(type=type,
- leftlineno=llo + i + 1,
- leftline=leftlines[llo + i],
- rightlineno=rlo + i + 1,
- rightline=rightlines[rlo + i])
- if len1 > len2:
- for i in xrange(llo + count, lhi):
- yield compline(type=type,
- leftlineno=i + 1,
- leftline=leftlines[i],
- rightlineno=None,
- rightline=None)
- elif len2 > len1:
- for i in xrange(rlo + count, rhi):
- yield compline(type=type,
- leftlineno=None,
- leftline=None,
- rightlineno=i + 1,
- rightline=rightlines[i])
+def _getcompblockgen(context, leftlines, rightlines, opcodes):
+ for type, llo, lhi, rlo, rhi in opcodes:
+ len1 = lhi - llo
+ len2 = rhi - rlo
+ count = min(len1, len2)
+ for i in xrange(count):
+ yield _compline(type=type,
+ leftlineno=llo + i + 1,
+ leftline=leftlines[llo + i],
+ rightlineno=rlo + i + 1,
+ rightline=rightlines[rlo + i])
+ if len1 > len2:
+ for i in xrange(llo + count, lhi):
+ yield _compline(type=type,
+ leftlineno=i + 1,
+ leftline=leftlines[i],
+ rightlineno=None,
+ rightline=None)
+ elif len2 > len1:
+ for i in xrange(rlo + count, rhi):
+ yield _compline(type=type,
+ leftlineno=None,
+ leftline=None,
+ rightlineno=i + 1,
+ rightline=rightlines[i])
+def _getcompblock(leftlines, rightlines, opcodes):
+ args = (leftlines, rightlines, opcodes)
+ return templateutil.mappinggenerator(_getcompblockgen, args=args,
+ name='comparisonline')
+
+def _comparegen(context, contextnum, leftlines, rightlines):
+ '''Generator function that provides side-by-side comparison data.'''
s = difflib.SequenceMatcher(None, leftlines, rightlines)
- if context < 0:
- yield tmpl.generate('comparisonblock',
- {'lines': getblock(s.get_opcodes())})
+ if contextnum < 0:
+ l = _getcompblock(leftlines, rightlines, s.get_opcodes())
+ yield {'lines': l}
else:
- for oc in s.get_grouped_opcodes(n=context):
- yield tmpl.generate('comparisonblock', {'lines': getblock(oc)})
+ for oc in s.get_grouped_opcodes(n=contextnum):
+ l = _getcompblock(leftlines, rightlines, oc)
+ yield {'lines': l}
+
+def compare(contextnum, leftlines, rightlines):
+ args = (contextnum, leftlines, rightlines)
+ return templateutil.mappinggenerator(_comparegen, args=args,
+ name='comparisonblock')
def diffstatgen(ctx, basectx):
'''Generator function that provides the diffstat data.'''
@@ -629,9 +670,7 @@
return _(' %d files changed, %d insertions(+), %d deletions(-)\n') % (
len(stats), addtotal, removetotal)
-def diffstat(tmpl, ctx, statgen, parity):
- '''Return a diffstat template for each file in the diff.'''
-
+def _diffstattmplgen(context, ctx, statgen, parity):
stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen)
files = ctx.files()
@@ -645,7 +684,7 @@
template = 'diffstatlink' if filename in files else 'diffstatnolink'
total = adds + removes
fileno += 1
- yield tmpl.generate(template, {
+ yield context.process(template, {
'node': ctx.hex(),
'file': filename,
'fileno': fileno,
@@ -655,6 +694,11 @@
'parity': next(parity),
})
+def diffstat(ctx, statgen, parity):
+ '''Return a diffstat template for each file in the diff.'''
+ args = (ctx, statgen, parity)
+ return templateutil.mappedgenerator(_diffstattmplgen, args=args)
+
class sessionvars(templateutil.wrapped):
def __init__(self, vars, start='?'):
self._start = start
@@ -733,3 +777,7 @@
repo.ui.warn(_("websub: invalid regexp for %s: %s\n")
% (key, regexp))
return websubtable
+
+def getgraphnode(repo, ctx):
+ return (templatekw.getgraphnodecurrent(repo, ctx) +
+ templatekw.getgraphnodesymbol(ctx))
--- a/mercurial/hook.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/hook.py Wed Jun 06 13:31:24 2018 -0400
@@ -24,7 +24,7 @@
stringutil,
)
-def _pythonhook(ui, repo, htype, hname, funcname, args, throw):
+def pythonhook(ui, repo, htype, hname, funcname, args, throw):
'''call python hook. hook is callable object, looked up as
name in python module. if callable returns "true", hook
fails, else passes. if hook raises exception, treated as
@@ -138,7 +138,7 @@
if callable(v):
v = v()
if isinstance(v, (dict, list)):
- v = stringutil.pprint(v, bprefix=False)
+ v = stringutil.pprint(v)
env['HG_' + k.upper()] = v
if repo:
@@ -242,7 +242,7 @@
r = 1
raised = False
elif callable(cmd):
- r, raised = _pythonhook(ui, repo, htype, hname, cmd, args,
+ r, raised = pythonhook(ui, repo, htype, hname, cmd, args,
throw)
elif cmd.startswith('python:'):
if cmd.count(':') >= 2:
@@ -258,7 +258,7 @@
hookfn = getattr(mod, cmd)
else:
hookfn = cmd[7:].strip()
- r, raised = _pythonhook(ui, repo, htype, hname, hookfn, args,
+ r, raised = pythonhook(ui, repo, htype, hname, hookfn, args,
throw)
else:
r = _exthook(ui, repo, htype, hname, cmd, args, throw)
--- a/mercurial/httppeer.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/httppeer.py Wed Jun 06 13:31:24 2018 -0400
@@ -13,7 +13,6 @@
import os
import socket
import struct
-import tempfile
import weakref
from .i18n import _
@@ -519,7 +518,7 @@
filename = None
try:
# dump bundle to disk
- fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
+ fd, filename = pycompat.mkstemp(prefix="hg-bundle-", suffix=".hg")
fh = os.fdopen(fd, r"wb")
d = fp.read(4096)
while d:
--- a/mercurial/localrepo.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/localrepo.py Wed Jun 06 13:31:24 2018 -0400
@@ -778,6 +778,10 @@
@repofilecache('dirstate')
def dirstate(self):
+ return self._makedirstate()
+
+ def _makedirstate(self):
+ """Extension point for wrapping the dirstate per-repo."""
sparsematchfn = lambda: sparse.matcher(self)
return dirstate.dirstate(self.vfs, self.ui, self.root,
@@ -1029,11 +1033,7 @@
def nodebookmarks(self, node):
"""return the list of bookmarks pointing to the specified node"""
- marks = []
- for bookmark, n in self._bookmarks.iteritems():
- if n == node:
- marks.append(bookmark)
- return sorted(marks)
+ return self._bookmarks.names(node)
def branchmap(self):
'''returns a dictionary {branch: [branchheads]} with branchheads
--- a/mercurial/lock.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/lock.py Wed Jun 06 13:31:24 2018 -0400
@@ -209,7 +209,7 @@
def __del__(self):
if self.held:
- warnings.warn("use lock.release instead of del lock",
+ warnings.warn(r"use lock.release instead of del lock",
category=DeprecationWarning,
stacklevel=2)
--- a/mercurial/logcmdutil.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/logcmdutil.py Wed Jun 06 13:31:24 2018 -0400
@@ -154,7 +154,9 @@
self.repo = repo
self.buffered = buffered
self._differ = differ or changesetdiffer()
- self.diffopts = diffopts or {}
+ self._diffopts = patch.diffallopts(ui, diffopts)
+ self._includestat = diffopts and diffopts.get('stat')
+ self._includediff = diffopts and diffopts.get('patch')
self.header = {}
self.hunk = {}
self.lastheader = None
@@ -298,16 +300,13 @@
'''
def _showpatch(self, ctx):
- stat = self.diffopts.get('stat')
- diff = self.diffopts.get('patch')
- diffopts = patch.diffallopts(self.ui, self.diffopts)
- if stat:
- self._differ.showdiff(self.ui, ctx, diffopts, stat=True)
- if stat and diff:
+ if self._includestat:
+ self._differ.showdiff(self.ui, ctx, self._diffopts, stat=True)
+ if self._includestat and self._includediff:
self.ui.write("\n")
- if diff:
- self._differ.showdiff(self.ui, ctx, diffopts, stat=False)
- if stat or diff:
+ if self._includediff:
+ self._differ.showdiff(self.ui, ctx, self._diffopts, stat=False)
+ if self._includestat or self._includediff:
self.ui.write("\n")
class changesetformatter(changesetprinter):
@@ -316,6 +315,7 @@
def __init__(self, ui, repo, fm, differ=None, diffopts=None,
buffered=False):
changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
+ self._diffopts = patch.difffeatureopts(ui, diffopts, git=True)
self._fm = fm
def close(self):
@@ -367,16 +367,13 @@
fm.data(copies=fm.formatdict(copies,
key='name', value='source'))
- stat = self.diffopts.get('stat')
- diff = self.diffopts.get('patch')
- diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
- if stat:
+ if self._includestat:
self.ui.pushbuffer()
- self._differ.showdiff(self.ui, ctx, diffopts, stat=True)
+ self._differ.showdiff(self.ui, ctx, self._diffopts, stat=True)
fm.data(diffstat=self.ui.popbuffer())
- if diff:
+ if self._includediff:
self.ui.pushbuffer()
- self._differ.showdiff(self.ui, ctx, diffopts, stat=False)
+ self._differ.showdiff(self.ui, ctx, self._diffopts, stat=False)
fm.data(diff=self.ui.popbuffer())
class changesettemplater(changesetprinter):
@@ -868,7 +865,7 @@
for fn in ctx.files():
rename = getrenamed(fn, ctx.rev())
if rename:
- copies.append((fn, rename[0]))
+ copies.append((fn, rename))
edges = edgefn(type, char, state, rev, parents)
firstedge = next(edges)
width = firstedge[2]
@@ -896,7 +893,7 @@
for fn in ctx.files():
rename = getrenamed(fn, rev)
if rename:
- copies.append((fn, rename[0]))
+ copies.append((fn, rename))
displayer.show(ctx, copies=copies)
displayer.flush(ctx)
displayer.close()
--- a/mercurial/logexchange.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/logexchange.py Wed Jun 06 13:31:24 2018 -0400
@@ -112,8 +112,8 @@
# represent the remotepath with user defined path name if exists
for path, url in repo.ui.configitems('paths'):
# remove auth info from user defined url
- url = util.removeauth(url)
- if url == rpath:
+ noauthurl = util.removeauth(url)
+ if url == rpath or noauthurl == rpath:
rpath = path
break
--- a/mercurial/match.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/match.py Wed Jun 06 13:31:24 2018 -0400
@@ -404,7 +404,7 @@
@encoding.strmethod
def __repr__(self):
- return ('<patternmatcher patterns=%r>' % self._pats)
+ return ('<patternmatcher patterns=%r>' % pycompat.bytestr(self._pats))
class includematcher(basematcher):
--- a/mercurial/merge.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/merge.py Wed Jun 06 13:31:24 2018 -0400
@@ -903,6 +903,23 @@
return actions
def _checkcollision(repo, wmf, actions):
+ """
+ Check for case-folding collisions.
+ """
+
+ # If the repo is narrowed, filter out files outside the narrowspec.
+ narrowmatch = repo.narrowmatch()
+ if not narrowmatch.always():
+ wmf = wmf.matches(narrowmatch)
+ if actions:
+ narrowactions = {}
+ for m, actionsfortype in actions.iteritems():
+ narrowactions[m] = []
+ for (f, args, msg) in actionsfortype:
+ if narrowmatch(f):
+ narrowactions[m].append((f, args, msg))
+ actions = narrowactions
+
# build provisional merged manifest up
pmmf = set(wmf)
@@ -1072,6 +1089,33 @@
repo.ui.warn(_("%s: is both a file and a directory\n") % p)
raise error.Abort(_("destination manifest contains path conflicts"))
+def _filternarrowactions(narrowmatch, branchmerge, actions):
+ """
+ Filters out actions that can ignored because the repo is narrowed.
+
+ Raise an exception if the merge cannot be completed because the repo is
+ narrowed.
+ """
+ nooptypes = set(['k']) # TODO: handle with nonconflicttypes
+ nonconflicttypes = set('a am c cm f g r e'.split())
+ # We mutate the items in the dict during iteration, so iterate
+ # over a copy.
+ for f, action in list(actions.items()):
+ if narrowmatch(f):
+ pass
+ elif not branchmerge:
+ del actions[f] # just updating, ignore changes outside clone
+ elif action[0] in nooptypes:
+ del actions[f] # merge does not affect file
+ elif action[0] in nonconflicttypes:
+ raise error.Abort(_('merge affects file \'%s\' outside narrow, '
+ 'which is not yet supported') % f,
+ hint=_('merging in the other direction '
+ 'may work'))
+ else:
+ raise error.Abort(_('conflict in file \'%s\' is outside '
+ 'narrow clone') % f)
+
def manifestmerge(repo, wctx, p2, pa, branchmerge, force, matcher,
acceptremote, followcopies, forcefulldiff=False):
"""
@@ -1256,6 +1300,11 @@
# If we are merging, look for path conflicts.
checkpathconflicts(repo, wctx, p2, actions)
+ narrowmatch = repo.narrowmatch()
+ if not narrowmatch.always():
+ # Updates "actions" in place
+ _filternarrowactions(narrowmatch, branchmerge, actions)
+
return actions, diverge, renamedelete
def _resolvetrivial(repo, wctx, mctx, ancestor, actions):
@@ -1490,27 +1539,6 @@
return (not self.updatedcount and not self.mergedcount
and not self.removedcount and not self.unresolvedcount)
- # TODO remove container emulation once consumers switch to new API.
-
- def __getitem__(self, x):
- util.nouideprecwarn('access merge.update() results by name instead of '
- 'index', '4.6', 2)
- if x == 0:
- return self.updatedcount
- elif x == 1:
- return self.mergedcount
- elif x == 2:
- return self.removedcount
- elif x == 3:
- return self.unresolvedcount
- else:
- raise IndexError('can only access items 0-3')
-
- def __len__(self):
- util.nouideprecwarn('access merge.update() results by name instead of '
- 'index', '4.6', 2)
- return 4
-
def applyupdates(repo, actions, wctx, mctx, overwrite, labels=None):
"""apply the merge action list to the working directory
--- a/mercurial/minifileset.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/minifileset.py Wed Jun 06 13:31:24 2018 -0400
@@ -11,6 +11,7 @@
from . import (
error,
fileset,
+ pycompat,
)
def _compile(tree):
@@ -21,14 +22,15 @@
name = fileset.getpattern(tree, {'path'}, _('invalid file pattern'))
if name.startswith('**'): # file extension test, ex. "**.tar.gz"
ext = name[2:]
- for c in ext:
+ for c in pycompat.bytestr(ext):
if c in '*{}[]?/\\':
raise error.ParseError(_('reserved character: %s') % c)
return lambda n, s: n.endswith(ext)
elif name.startswith('path:'): # directory or full path test
p = name[5:] # prefix
pl = len(p)
- f = lambda n, s: n.startswith(p) and (len(n) == pl or n[pl] == '/')
+ f = lambda n, s: n.startswith(p) and (len(n) == pl
+ or n[pl:pl + 1] == '/')
return f
raise error.ParseError(_("unsupported file pattern: %s") % name,
hint=_('paths must be prefixed with "path:"'))
--- a/mercurial/minirst.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/minirst.py Wed Jun 06 13:31:24 2018 -0400
@@ -169,7 +169,7 @@
if not itemre.match(line1):
return False
if singleline:
- return line2 == '' or line2[0] == ' ' or itemre.match(line2)
+ return line2 == '' or line2[0:1] == ' ' or itemre.match(line2)
else:
return line2.startswith(' ')
--- a/mercurial/obsolete.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/obsolete.py Wed Jun 06 13:31:24 2018 -0400
@@ -853,7 +853,7 @@
def _mutablerevs(repo):
"""the set of mutable revision in the repository"""
- return repo._phasecache.getrevset(repo, (phases.draft, phases.secret))
+ return repo._phasecache.getrevset(repo, phases.mutablephases)
@cachefor('obsolete')
def _computeobsoleteset(repo):
--- a/mercurial/patch.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/patch.py Wed Jun 06 13:31:24 2018 -0400
@@ -18,7 +18,6 @@
import posixpath
import re
import shutil
-import tempfile
import zlib
from .i18n import _
@@ -51,7 +50,7 @@
gitre = re.compile(br'diff --git a/(.*) b/(.*)')
tabsplitter = re.compile(br'(\t+|[^\t]+)')
wordsplitter = re.compile(br'(\t+| +|[a-zA-Z0-9_\x80-\xff]+|'
- '[^ \ta-zA-Z0-9_\x80-\xff])')
+ b'[^ \ta-zA-Z0-9_\x80-\xff])')
PatchError = error.PatchError
@@ -211,7 +210,7 @@
Any item can be missing from the dictionary. If filename is missing,
fileobj did not contain a patch. Caller must unlink filename when done.'''
- fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
+ fd, tmpname = pycompat.mkstemp(prefix='hg-patch-')
tmpfp = os.fdopen(fd, r'wb')
try:
yield _extract(ui, fileobj, tmpname, tmpfp)
@@ -573,7 +572,7 @@
self.size += len(data)
else:
if self.opener is None:
- root = tempfile.mkdtemp(prefix='hg-patch-')
+ root = pycompat.mkdtemp(prefix='hg-patch-')
self.opener = vfsmod.vfs(root)
# Avoid filename issues with these simple names
fn = '%d' % self.created
@@ -1109,7 +1108,7 @@
all lines of the hunk are removed, then the edit is aborted and
the hunk is left unchanged.
""")
- (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-",
+ (patchfd, patchfn) = pycompat.mkstemp(prefix="hg-editor-",
suffix=".diff")
ncpatchfp = None
try:
@@ -1946,7 +1945,7 @@
"""
def deltahead(binchunk):
i = 0
- for c in binchunk:
+ for c in pycompat.bytestr(binchunk):
i += 1
if not (ord(c) & 0x80):
return i
@@ -1958,31 +1957,31 @@
binchunk = binchunk[s:]
i = 0
while i < len(binchunk):
- cmd = ord(binchunk[i])
+ cmd = ord(binchunk[i:i + 1])
i += 1
if (cmd & 0x80):
offset = 0
size = 0
if (cmd & 0x01):
- offset = ord(binchunk[i])
+ offset = ord(binchunk[i:i + 1])
i += 1
if (cmd & 0x02):
- offset |= ord(binchunk[i]) << 8
+ offset |= ord(binchunk[i:i + 1]) << 8
i += 1
if (cmd & 0x04):
- offset |= ord(binchunk[i]) << 16
+ offset |= ord(binchunk[i:i + 1]) << 16
i += 1
if (cmd & 0x08):
- offset |= ord(binchunk[i]) << 24
+ offset |= ord(binchunk[i:i + 1]) << 24
i += 1
if (cmd & 0x10):
- size = ord(binchunk[i])
+ size = ord(binchunk[i:i + 1])
i += 1
if (cmd & 0x20):
- size |= ord(binchunk[i]) << 8
+ size |= ord(binchunk[i:i + 1]) << 8
i += 1
if (cmd & 0x40):
- size |= ord(binchunk[i]) << 16
+ size |= ord(binchunk[i:i + 1]) << 16
i += 1
if size == 0:
size = 0x10000
@@ -2113,6 +2112,7 @@
args.append('-d %s' % procutil.shellquote(cwd))
cmd = ('%s %s -p%d < %s'
% (patcher, ' '.join(args), strip, procutil.shellquote(patchname)))
+ ui.debug('Using external patch tool: %s\n' % cmd)
fp = procutil.popen(cmd, 'rb')
try:
for line in util.iterfile(fp):
@@ -2492,14 +2492,14 @@
chompline = line.rstrip('\n')
# highlight tabs and trailing whitespace
stripline = chompline.rstrip()
- if line[0] == '-':
+ if line.startswith('-'):
label = 'diff.deleted'
- elif line[0] == '+':
+ elif line.startswith('+'):
label = 'diff.inserted'
else:
raise error.ProgrammingError('unexpected hunk line: %s' % line)
for token in tabsplitter.findall(stripline):
- if '\t' == token[0]:
+ if token.startswith('\t'):
yield (token, 'diff.tab')
else:
yield (token, label)
--- a/mercurial/phases.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/phases.py Wed Jun 06 13:31:24 2018 -0400
@@ -126,6 +126,8 @@
allphases = public, draft, secret = range(3)
trackedphases = allphases[1:]
phasenames = ['public', 'draft', 'secret']
+mutablephases = tuple(allphases[1:])
+remotehiddenphases = tuple(allphases[2:])
def _readroots(repo, phasedefaults=None):
"""Read phase roots from disk
@@ -352,10 +354,14 @@
_trackphasechange(phasetracking, rev, None, revphase)
repo.invalidatevolatilesets()
- def advanceboundary(self, repo, tr, targetphase, nodes):
+ def advanceboundary(self, repo, tr, targetphase, nodes, dryrun=None):
"""Set all 'nodes' to phase 'targetphase'
Nodes with a phase lower than 'targetphase' are not affected.
+
+ If dryrun is True, no actions will be performed
+
+ Returns a set of revs whose phase is changed or should be changed
"""
# Be careful to preserve shallow-copied values: do not update
# phaseroots values, replace them.
@@ -366,6 +372,7 @@
repo = repo.unfiltered()
+ changes = set() # set of revisions to be changed
delroots = [] # set of root deleted by this path
for phase in xrange(targetphase + 1, len(allphases)):
# filter nodes that are not in a compatible phase already
@@ -377,6 +384,9 @@
olds = self.phaseroots[phase]
affected = repo.revs('%ln::%ln', olds, nodes)
+ changes.update(affected)
+ if dryrun:
+ continue
for r in affected:
_trackphasechange(phasetracking, r, self.phase(repo, r),
targetphase)
@@ -387,10 +397,12 @@
self._updateroots(phase, roots, tr)
# some roots may need to be declared for lower phases
delroots.extend(olds - roots)
- # declare deleted root in the target phase
- if targetphase != 0:
- self._retractboundary(repo, tr, targetphase, delroots)
- repo.invalidatevolatilesets()
+ if not dryrun:
+ # declare deleted root in the target phase
+ if targetphase != 0:
+ self._retractboundary(repo, tr, targetphase, delroots)
+ repo.invalidatevolatilesets()
+ return changes
def retractboundary(self, repo, tr, targetphase, nodes):
oldroots = self.phaseroots[:targetphase + 1]
@@ -478,16 +490,24 @@
# (see branchmap one)
self.invalidate()
-def advanceboundary(repo, tr, targetphase, nodes):
+def advanceboundary(repo, tr, targetphase, nodes, dryrun=None):
"""Add nodes to a phase changing other nodes phases if necessary.
This function move boundary *forward* this means that all nodes
are set in the target phase or kept in a *lower* phase.
- Simplify boundary to contains phase roots only."""
+ Simplify boundary to contains phase roots only.
+
+ If dryrun is True, no actions will be performed
+
+ Returns a set of revs whose phase is changed or should be changed
+ """
phcache = repo._phasecache.copy()
- phcache.advanceboundary(repo, tr, targetphase, nodes)
- repo._phasecache.replace(phcache)
+ changes = phcache.advanceboundary(repo, tr, targetphase, nodes,
+ dryrun=dryrun)
+ if not dryrun:
+ repo._phasecache.replace(phcache)
+ return changes
def retractboundary(repo, tr, targetphase, nodes):
"""Set nodes back to a phase changing other nodes phases if
--- a/mercurial/policy.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/policy.py Wed Jun 06 13:31:24 2018 -0400
@@ -69,7 +69,7 @@
(r'cext', r'bdiff'): 3,
(r'cext', r'mpatch'): 1,
(r'cext', r'osutil'): 4,
- (r'cext', r'parsers'): 4,
+ (r'cext', r'parsers'): 5,
}
# map import request to other package or module
--- a/mercurial/posix.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/posix.py Wed Jun 06 13:31:24 2018 -0400
@@ -216,7 +216,7 @@
# check directly in path and don't leave checkisexec behind
checkdir = path
checkisexec = None
- fh, fn = tempfile.mkstemp(dir=checkdir, prefix='hg-checkexec-')
+ fh, fn = pycompat.mkstemp(dir=checkdir, prefix='hg-checkexec-')
try:
os.close(fh)
m = os.stat(fn).st_mode
@@ -249,16 +249,15 @@
else:
checkdir = path
cachedir = None
- fscheckdir = pycompat.fsdecode(checkdir)
- name = tempfile.mktemp(dir=fscheckdir,
+ name = tempfile.mktemp(dir=pycompat.fsdecode(checkdir),
prefix=r'checklink-')
name = pycompat.fsencode(name)
try:
fd = None
if cachedir is None:
- fd = tempfile.NamedTemporaryFile(dir=fscheckdir,
- prefix=r'hg-checklink-')
- target = pycompat.fsencode(os.path.basename(fd.name))
+ fd = pycompat.namedtempfile(dir=checkdir,
+ prefix='hg-checklink-')
+ target = os.path.basename(fd.name)
else:
# create a fixed file to link to; doesn't matter if it
# already exists.
@@ -287,7 +286,7 @@
return True
except OSError as inst:
# link creation might race, try again
- if inst[0] == errno.EEXIST:
+ if inst.errno == errno.EEXIST:
continue
raise
finally:
@@ -297,7 +296,7 @@
return False
except OSError as inst:
# sshfs might report failure while successfully creating the link
- if inst[0] == errno.EIO and os.path.exists(name):
+ if inst.errno == errno.EIO and os.path.exists(name):
unlink(name)
return False
--- a/mercurial/pycompat.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/pycompat.py Wed Jun 06 13:31:24 2018 -0400
@@ -15,6 +15,7 @@
import os
import shlex
import sys
+import tempfile
ispy3 = (sys.version_info[0] >= 3)
ispypy = (r'__pypy__' in sys.builtin_module_names)
@@ -23,7 +24,7 @@
import cookielib
import cPickle as pickle
import httplib
- import Queue as _queue
+ import Queue as queue
import SocketServer as socketserver
import xmlrpclib
@@ -36,16 +37,13 @@
import http.cookiejar as cookielib
import http.client as httplib
import pickle
- import queue as _queue
+ import queue as queue
import socketserver
import xmlrpc.client as xmlrpclib
def future_set_exception_info(f, exc_info):
f.set_exception(exc_info[0])
-empty = _queue.Empty
-queue = _queue.Queue
-
def identity(a):
return a
@@ -387,3 +385,18 @@
def gnugetoptb(args, shortlist, namelist):
return _getoptbwrapper(getopt.gnu_getopt, args, shortlist, namelist)
+
+def mkdtemp(suffix=b'', prefix=b'tmp', dir=None):
+ return tempfile.mkdtemp(suffix, prefix, dir)
+
+# text=True is not supported; use util.from/tonativeeol() instead
+def mkstemp(suffix=b'', prefix=b'tmp', dir=None):
+ return tempfile.mkstemp(suffix, prefix, dir)
+
+# mode must include 'b'ytes as encoding= is not supported
+def namedtempfile(mode=b'w+b', bufsize=-1, suffix=b'', prefix=b'tmp', dir=None,
+ delete=True):
+ mode = sysstr(mode)
+ assert r'b' in mode
+ return tempfile.NamedTemporaryFile(mode, bufsize, suffix=suffix,
+ prefix=prefix, dir=dir, delete=delete)
--- a/mercurial/repair.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/repair.py Wed Jun 06 13:31:24 2018 -0400
@@ -405,18 +405,6 @@
else:
ui.write(_('fncache already up to date\n'))
-def stripbmrevset(repo, mark):
- """
- The revset to strip when strip is called with -B mark
-
- Needs to live here so extensions can use it and wrap it even when strip is
- not enabled or not present on a box.
- """
- return repo.revs("ancestors(bookmark(%s)) - "
- "ancestors(head() and not bookmark(%s)) - "
- "ancestors(bookmark() and not bookmark(%s))",
- mark, mark, mark)
-
def deleteobsmarkers(obsstore, indices):
"""Delete some obsmarkers from obsstore and return how many were deleted
--- a/mercurial/repoview.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/repoview.py Wed Jun 06 13:31:24 2018 -0400
@@ -77,8 +77,7 @@
if visibilityexceptions:
hidden -= visibilityexceptions
pfunc = repo.changelog.parentrevs
- mutablephases = (phases.draft, phases.secret)
- mutable = repo._phasecache.getrevset(repo, mutablephases)
+ mutable = repo._phasecache.getrevset(repo, phases.mutablephases)
visible = mutable - hidden
_revealancestors(pfunc, hidden, visible)
@@ -92,13 +91,8 @@
# fast path in simple case to avoid impact of non optimised code
hiddens = filterrevs(repo, 'visible')
if phases.hassecret(repo):
- cl = repo.changelog
- secret = phases.secret
- getphase = repo._phasecache.phase
- first = min(cl.rev(n) for n in repo._phasecache.phaseroots[secret])
- revs = cl.revs(start=first)
- secrets = set(r for r in revs if getphase(repo, r) >= secret)
- return frozenset(hiddens | secrets)
+ secrets = repo._phasecache.getrevset(repo, phases.remotehiddenphases)
+ return frozenset(hiddens | frozenset(secrets))
else:
return hiddens
--- a/mercurial/revlog.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/revlog.py Wed Jun 06 13:31:24 2018 -0400
@@ -305,6 +305,7 @@
grouped by level of easiness.
"""
revlog = self.revlog
+ gdelta = revlog._generaldelta
curr = len(revlog)
prev = curr - 1
p1r, p2r = revlog.rev(p1), revlog.rev(p2)
@@ -316,27 +317,35 @@
# changegroup data into a generaldelta repo. The only time it
# isn't true is if this is the first revision in a delta chain
# or if ``format.generaldelta=true`` disabled ``lazydeltabase``.
- if cachedelta and revlog._generaldelta and revlog._lazydeltabase:
+ if cachedelta and gdelta and revlog._lazydeltabase:
# Assume what we received from the server is a good choice
# build delta will reuse the cache
yield (cachedelta[0],)
tested.add(cachedelta[0])
- if revlog._generaldelta:
+ if gdelta:
# exclude already lazy tested base if any
parents = [p for p in (p1r, p2r)
if p != nullrev and p not in tested]
- if parents and not revlog._aggressivemergedeltas:
- # Pick whichever parent is closer to us (to minimize the
- # chance of having to build a fulltext).
- parents = [max(parents)]
- tested.update(parents)
- yield parents
+
+ if not revlog._aggressivemergedeltas and len(parents) == 2:
+ parents.sort()
+ # To minimize the chance of having to build a fulltext,
+ # pick first whichever parent is closest to us (max rev)
+ yield (parents[1],)
+ # then the other one (min rev) if the first did not fit
+ yield (parents[0],)
+ tested.update(parents)
+ elif len(parents) > 0:
+ # Test all parents (1 or 2), and keep the best candidate
+ yield parents
+ tested.update(parents)
if prev not in tested:
# other approach failed try against prev to hopefully save us a
# fulltext.
yield (prev,)
+ tested.add(prev)
def buildtext(self, revinfo, fh):
"""Builds a fulltext version of a revision
@@ -441,7 +450,7 @@
if revlog.flags(candidaterev) & REVIDX_RAWTEXT_CHANGING_FLAGS:
continue
candidatedelta = self._builddeltainfo(revinfo, candidaterev, fh)
- if revlog._isgooddeltainfo(candidatedelta, revinfo.textlen):
+ if revlog._isgooddeltainfo(candidatedelta, revinfo):
nominateddeltas.append(candidatedelta)
if nominateddeltas:
deltainfo = min(nominateddeltas, key=lambda x: x.deltalen)
@@ -868,10 +877,11 @@
return base
index = self.index
- base = index[rev][3]
- while base != rev:
- rev = base
- base = index[rev][3]
+ iterrev = rev
+ base = index[iterrev][3]
+ while base != iterrev:
+ iterrev = base
+ base = index[iterrev][3]
self._chainbasecache[rev] = base
return base
@@ -1502,42 +1512,51 @@
def shortest(self, node, minlength=1):
"""Find the shortest unambiguous prefix that matches node."""
- def isvalid(test):
+ def isvalid(prefix):
try:
- if self._partialmatch(test) is None:
- return False
-
- try:
- i = int(test)
- # if we are a pure int, then starting with zero will not be
- # confused as a rev; or, obviously, if the int is larger
- # than the value of the tip rev
- if test[0] == '0' or i > len(self):
- return True
- return False
- except ValueError:
- return True
+ node = self._partialmatch(prefix)
except error.RevlogError:
return False
except error.WdirUnsupported:
# single 'ff...' match
return True
+ if node is None:
+ raise LookupError(node, self.indexfile, _('no node'))
+ return True
+
+ def maybewdir(prefix):
+ return all(c == 'f' for c in prefix)
hexnode = hex(node)
- shortest = hexnode
- startlength = max(6, minlength)
- length = startlength
- while True:
- test = hexnode[:length]
- if isvalid(test):
- shortest = test
- if length == minlength or length > startlength:
- return shortest
- length -= 1
- else:
- length += 1
- if len(shortest) <= length:
- return shortest
+
+ def disambiguate(hexnode, minlength):
+ """Disambiguate against wdirid."""
+ for length in range(minlength, 41):
+ prefix = hexnode[:length]
+ if not maybewdir(prefix):
+ return prefix
+
+ if not getattr(self, 'filteredrevs', None):
+ try:
+ length = max(self.index.shortest(node), minlength)
+ return disambiguate(hexnode, length)
+ except RevlogError:
+ if node != wdirid:
+ raise LookupError(node, self.indexfile, _('no node'))
+ except AttributeError:
+ # Fall through to pure code
+ pass
+
+ if node == wdirid:
+ for length in range(minlength, 41):
+ prefix = hexnode[:length]
+ if isvalid(prefix):
+ return prefix
+
+ for length in range(minlength, 41):
+ prefix = hexnode[:length]
+ if isvalid(prefix):
+ return disambiguate(hexnode, length)
def cmp(self, node, text):
"""compare text with a given file revision
@@ -2076,26 +2095,28 @@
return compressor.decompress(data)
- def _isgooddeltainfo(self, d, textlen):
+ def _isgooddeltainfo(self, deltainfo, revinfo):
"""Returns True if the given delta is good. Good means that it is within
the disk span, disk size, and chain length bounds that we know to be
performant."""
- if d is None:
+ if deltainfo is None:
return False
- # - 'd.distance' is the distance from the base revision -- bounding it
- # limits the amount of I/O we need to do.
- # - 'd.compresseddeltalen' is the sum of the total size of deltas we
- # need to apply -- bounding it limits the amount of CPU we consume.
-
+ # - 'deltainfo.distance' is the distance from the base revision --
+ # bounding it limits the amount of I/O we need to do.
+ # - 'deltainfo.compresseddeltalen' is the sum of the total size of
+ # deltas we need to apply -- bounding it limits the amount of CPU
+ # we consume.
+
+ textlen = revinfo.textlen
defaultmax = textlen * 4
maxdist = self._maxdeltachainspan
if not maxdist:
- maxdist = d.distance # ensure the conditional pass
+ maxdist = deltainfo.distance # ensure the conditional pass
maxdist = max(maxdist, defaultmax)
- if (d.distance > maxdist or d.deltalen > textlen or
- d.compresseddeltalen > textlen * 2 or
- (self._maxchainlen and d.chainlen > self._maxchainlen)):
+ if (deltainfo.distance > maxdist or deltainfo.deltalen > textlen or
+ deltainfo.compresseddeltalen > textlen * 2 or
+ (self._maxchainlen and deltainfo.chainlen > self._maxchainlen)):
return False
return True
--- a/mercurial/revset.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/revset.py Wed Jun 06 13:31:24 2018 -0400
@@ -1333,9 +1333,11 @@
else:
rn = None
try:
- pm = repo.changelog._partialmatch(n)
+ pm = scmutil.resolvehexnodeidprefix(repo, n)
if pm is not None:
rn = repo.changelog.rev(pm)
+ except LookupError:
+ pass
except error.WdirUnsupported:
rn = node.wdirrev
--- a/mercurial/scmutil.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/scmutil.py Wed Jun 06 13:31:24 2018 -0400
@@ -104,8 +104,9 @@
return self[6]
def __repr__(self, *args, **kwargs):
- return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
- 'unknown=%r, ignored=%r, clean=%r>') % self)
+ return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
+ r'unknown=%s, ignored=%s, clean=%s>') %
+ tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
def itersubrepos(ctx1, ctx2):
"""find subrepos in ctx1 or ctx2"""
@@ -448,7 +449,32 @@
# _partialmatch() of filtered changelog could take O(len(repo)) time,
# which would be unacceptably slow. so we look for hash collision in
# unfiltered space, which means some hashes may be slightly longer.
- return repo.unfiltered().changelog.shortest(node, minlength)
+ cl = repo.unfiltered().changelog
+
+ def isrev(prefix):
+ try:
+ i = int(prefix)
+ # if we are a pure int, then starting with zero will not be
+ # confused as a rev; or, obviously, if the int is larger
+ # than the value of the tip rev
+ if prefix[0] == '0' or i > len(cl):
+ return False
+ return True
+ except ValueError:
+ return False
+
+ def disambiguate(prefix):
+ """Disambiguate against revnums."""
+ hexnode = hex(node)
+ for length in range(len(prefix), len(hexnode) + 1):
+ prefix = hexnode[:length]
+ if not isrev(prefix):
+ return prefix
+
+ try:
+ return disambiguate(cl.shortest(node, minlength))
+ except error.LookupError:
+ raise error.RepoLookupError()
def isrevsymbol(repo, symbol):
"""Checks if a symbol exists in the repo.
@@ -561,11 +587,6 @@
tree = revsetlang.parse(revspec)
return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
-def revpairnodes(repo, revs):
- repo.ui.deprecwarn("revpairnodes is deprecated, please use revpair", "4.6")
- ctx1, ctx2 = revpair(repo, revs)
- return ctx1.node(), ctx2.node()
-
def revpair(repo, revs):
if not revs:
return repo['.'], repo[None]
@@ -1139,7 +1160,8 @@
def __call__(self, func):
self.func = func
- self.name = func.__name__.encode('ascii')
+ self.sname = func.__name__
+ self.name = pycompat.sysbytes(self.sname)
return self
def __get__(self, obj, type=None):
@@ -1147,9 +1169,9 @@
if obj is None:
return self
# do we need to check if the file changed?
- if self.name in obj.__dict__:
+ if self.sname in obj.__dict__:
assert self.name in obj._filecache, self.name
- return obj.__dict__[self.name]
+ return obj.__dict__[self.sname]
entry = obj._filecache.get(self.name)
@@ -1166,7 +1188,7 @@
obj._filecache[self.name] = entry
- obj.__dict__[self.name] = entry.obj
+ obj.__dict__[self.sname] = entry.obj
return entry.obj
def __set__(self, obj, value):
@@ -1180,13 +1202,13 @@
ce = obj._filecache[self.name]
ce.obj = value # update cached copy
- obj.__dict__[self.name] = value # update copy returned by obj.x
+ obj.__dict__[self.sname] = value # update copy returned by obj.x
def __delete__(self, obj):
try:
- del obj.__dict__[self.name]
+ del obj.__dict__[self.sname]
except KeyError:
- raise AttributeError(self.name)
+ raise AttributeError(self.sname)
def extdatasource(repo, source):
"""Gather a map of rev -> value dict from the specified source
@@ -1460,6 +1482,24 @@
revrange = '%s:%s' % (minrev, maxrev)
repo.ui.status(_('new changesets %s\n') % revrange)
+ @reportsummary
+ def reportphasechanges(repo, tr):
+ """Report statistics of phase changes for changesets pre-existing
+ pull/unbundle.
+ """
+ newrevs = tr.changes.get('revs', xrange(0, 0))
+ phasetracking = tr.changes.get('phases', {})
+ if not phasetracking:
+ return
+ published = [
+ rev for rev, (old, new) in phasetracking.iteritems()
+ if new == phases.public and rev not in newrevs
+ ]
+ if not published:
+ return
+ repo.ui.status(_('%d changesets became public\n')
+ % len(published))
+
def nodesummaries(repo, nodes, maxnumnodes=4):
if len(nodes) <= maxnumnodes or repo.ui.verbose:
return ' '.join(short(h) for h in nodes)
@@ -1538,7 +1578,6 @@
unficl = unfi.changelog
cl = repo.changelog
tiprev = len(unficl)
- pmatch = unficl._partialmatch
allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
for s in symbols:
try:
@@ -1554,7 +1593,7 @@
pass
try:
- s = pmatch(s)
+ s = resolvehexnodeidprefix(unfi, s)
except (error.LookupError, error.WdirUnsupported):
s = None
@@ -1564,3 +1603,12 @@
revs.add(rev)
return revs
+
+def bookmarkrevs(repo, mark):
+ """
+ Select revisions reachable by a given bookmark
+ """
+ return repo.revs("ancestors(bookmark(%s)) - "
+ "ancestors(head() and not bookmark(%s)) - "
+ "ancestors(bookmark() and not bookmark(%s))",
+ mark, mark, mark)
--- a/mercurial/server.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/server.py Wed Jun 06 13:31:24 2018 -0400
@@ -8,7 +8,6 @@
from __future__ import absolute_import
import os
-import tempfile
from .i18n import _
@@ -72,7 +71,7 @@
if opts['daemon'] and not opts['daemon_postexec']:
# Signal child process startup with file removal
- lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
+ lockfd, lockpath = pycompat.mkstemp(prefix='hg-service-')
os.close(lockfd)
try:
if not runargs:
--- a/mercurial/sslutil.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/sslutil.py Wed Jun 06 13:31:24 2018 -0400
@@ -640,9 +640,9 @@
return _('no certificate received')
dnsnames = []
- san = cert.get('subjectAltName', [])
+ san = cert.get(r'subjectAltName', [])
for key, value in san:
- if key == 'DNS':
+ if key == r'DNS':
try:
if _dnsnamematch(value, hostname):
return
@@ -672,6 +672,7 @@
dnsnames.append(value)
+ dnsnames = [pycompat.bytesurl(d) for d in dnsnames]
if len(dnsnames) > 1:
return _('certificate is for %s') % ', '.join(dnsnames)
elif len(dnsnames) == 1:
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/state.py Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,84 @@
+# state.py - writing and reading state files in Mercurial
+#
+# Copyright 2018 Pulkit Goyal <pulkitmgoyal@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+"""
+This file contains a class to wrap the state for commands and other
+related logic.
+
+All the data related to the command state is stored as a dictionary in the object.
+The class has methods with which the data can be stored to disk in a file under
+.hg/ directory.
+
+We store the data on disk in cbor, for which we use the third party cbor library
+to serialize and deserialize data.
+"""
+
+from __future__ import absolute_import
+
+from .thirdparty import cbor
+
+from . import (
+ error,
+ util,
+)
+
+class cmdstate(object):
+ """a wrapper class to store the state of commands like `rebase`, `graft`,
+ `histedit`, `shelve` etc. Extensions can also use this to write state files.
+
+ All the data for the state is stored in the form of key-value pairs in a
+ dictionary.
+
+ The class object can write all the data to a file in .hg/ directory and
+ can populate the object data reading that file.
+
+ Uses cbor to serialize and deserialize data while writing and reading from
+ disk.
+ """
+
+ def __init__(self, repo, fname):
+ """ repo is the repo object
+ fname is the file name in which data should be stored in .hg directory
+ """
+ self._repo = repo
+ self.fname = fname
+
+ def read(self):
+ """read the existing state file and return a dict of data stored"""
+ return self._read()
+
+ def save(self, version, data):
+ """write all the state data stored to .hg/<filename> file
+
+ we use third-party library cbor to serialize data to write in the file.
+ """
+ if not isinstance(version, int):
+ raise error.ProgrammingError("version of state file should be"
+ " an integer")
+
+ with self._repo.vfs(self.fname, 'wb', atomictemp=True) as fp:
+ fp.write('%d\n' % version)
+ cbor.dump(data, fp, canonical=True)
+
+ def _read(self):
+ """reads the state file and returns a dictionary which contains
+ data in the same format as it was before storing"""
+ with self._repo.vfs(self.fname, 'rb') as fp:
+ try:
+ int(fp.readline())
+ except ValueError:
+ raise error.CorruptedState("unknown version of state file"
+ " found")
+ return cbor.load(fp)
+
+ def delete(self):
+ """drop the state file if exists"""
+ util.unlinkpath(self._repo.vfs.join(self.fname), ignoremissing=True)
+
+ def exists(self):
+ """check whether the state file exists or not"""
+ return self._repo.vfs.exists(self.fname)
--- a/mercurial/statprof.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/statprof.py Wed Jun 06 13:31:24 2018 -0400
@@ -112,7 +112,6 @@
import os
import signal
import sys
-import tempfile
import threading
import time
@@ -691,7 +690,7 @@
file=fp)
return
- fd, path = tempfile.mkstemp()
+ fd, path = pycompat.mkstemp()
file = open(path, "w+")
--- a/mercurial/streamclone.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/streamclone.py Wed Jun 06 13:31:24 2018 -0400
@@ -10,7 +10,6 @@
import contextlib
import os
import struct
-import tempfile
import warnings
from .i18n import _
@@ -19,6 +18,7 @@
cacheutil,
error,
phases,
+ pycompat,
store,
util,
)
@@ -469,7 +469,7 @@
files = []
try:
def copy(src):
- fd, dst = tempfile.mkstemp()
+ fd, dst = pycompat.mkstemp()
os.close(fd)
files.append(dst)
util.copyfiles(src, dst, hardlink=True)
--- a/mercurial/templatefilters.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/templatefilters.py Wed Jun 06 13:31:24 2018 -0400
@@ -99,6 +99,45 @@
"""
return os.path.basename(path)
+@templatefilter('commonprefix')
+def commonprefix(filelist):
+ """List of text. Treats each list item as file name with /
+ as path separator and returns the longest common directory
+ prefix shared by all list items.
+ Returns the empty string if no common prefix exists.
+
+ The list items are not normalized, i.e. "foo/../bar" is handled as
+ file "bar" in the directory "foo/..". Leading slashes are ignored.
+
+ For example, ["foo/bar/baz", "foo/baz/bar"] becomes "foo" and
+ ["foo/bar", "baz"] becomes "".
+ """
+ def common(a, b):
+ if len(a) > len(b):
+ a = b[:len(a)]
+ elif len(b) > len(a):
+ b = b[:len(a)]
+ if a == b:
+ return a
+ for i in xrange(len(a)):
+ if a[i] != b[i]:
+ return a[:i]
+ return a
+ try:
+ if not filelist:
+ return ""
+ dirlist = [f.lstrip('/').split('/')[:-1] for f in filelist]
+ if len(dirlist) == 1:
+ return '/'.join(dirlist[0])
+ a = min(dirlist)
+ b = max(dirlist)
+ # The common prefix of a and b is shared with all
+ # elements of the list since Python sorts lexicographically
+ # and [1, x] after [1].
+ return '/'.join(common(a, b))
+ except TypeError:
+ raise error.ParseError(_('argument is not a list of text'))
+
@templatefilter('count')
def count(i):
"""List or text. Returns the length as an integer."""
@@ -238,6 +277,7 @@
@templatefilter('json')
def json(obj, paranoid=True):
+ """Any object. Serializes the object to a JSON formatted text."""
if obj is None:
return 'null'
elif obj is False:
@@ -248,13 +288,9 @@
return pycompat.bytestr(obj)
elif isinstance(obj, bytes):
return '"%s"' % encoding.jsonescape(obj, paranoid=paranoid)
- elif isinstance(obj, str):
- # This branch is unreachable on Python 2, because bytes == str
- # and we'll return in the next-earlier block in the elif
- # ladder. On Python 3, this helps us catch bugs before they
- # hurt someone.
+ elif isinstance(obj, type(u'')):
raise error.ProgrammingError(
- 'Mercurial only does output with bytes on Python 3: %r' % obj)
+ 'Mercurial only does output with bytes: %r' % obj)
elif util.safehasattr(obj, 'keys'):
out = ['"%s": %s' % (encoding.jsonescape(k, paranoid=paranoid),
json(v, paranoid))
--- a/mercurial/templatefuncs.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/templatefuncs.py Wed Jun 06 13:31:24 2018 -0400
@@ -12,6 +12,7 @@
from .i18n import _
from .node import (
bin,
+ wdirid,
)
from . import (
color,
@@ -35,6 +36,7 @@
)
evalrawexp = templateutil.evalrawexp
+evalwrapped = templateutil.evalwrapped
evalfuncarg = templateutil.evalfuncarg
evalboolean = templateutil.evalboolean
evaldate = templateutil.evaldate
@@ -112,6 +114,13 @@
raise error.ParseError(_('extdata expects one argument'))
source = evalstring(context, mapping, args['source'])
+ if not source:
+ sym = templateutil.findsymbolicname(args['source'])
+ if sym:
+ raise error.ParseError(_('empty data source specified'),
+ hint=_("did you mean extdata('%s')?") % sym)
+ else:
+ raise error.ParseError(_('empty data source specified'))
cache = context.resource(mapping, 'cache').setdefault('extdata', {})
ctx = context.resource(mapping, 'ctx')
if source in cache:
@@ -319,16 +328,11 @@
# i18n: "join" is a keyword
raise error.ParseError(_("join expects one or two arguments"))
- joinset = evalrawexp(context, mapping, args[0])
+ joinset = evalwrapped(context, mapping, args[0])
joiner = " "
if len(args) > 1:
joiner = evalstring(context, mapping, args[1])
- if isinstance(joinset, templateutil.wrapped):
- return joinset.join(context, mapping, joiner)
- # TODO: perhaps a generator should be stringify()-ed here, but we can't
- # because hgweb abuses it as a keyword that returns a list of dicts.
- joinset = templateutil.unwrapvalue(context, mapping, joinset)
- return templateutil.joinitems(pycompat.maybebytestr(joinset), joiner)
+ return joinset.join(context, mapping, joiner)
@templatefunc('label(label, expr)')
def label(context, mapping, args):
@@ -352,7 +356,9 @@
"""The global tags matching the given pattern on the
most recent globally tagged ancestor of this changeset.
If no such tags exist, the "{tag}" template resolves to
- the string "null"."""
+ the string "null". See :hg:`help revisions.patterns` for the pattern
+ syntax.
+ """
if len(args) > 1:
# i18n: "latesttag" is a keyword
raise error.ParseError(_("latesttag expects at most one argument"))
@@ -601,11 +607,16 @@
else:
try:
node = scmutil.resolvehexnodeidprefix(repo, hexnode)
- except (error.LookupError, error.WdirUnsupported):
+ except error.WdirUnsupported:
+ node = wdirid
+ except error.LookupError:
return hexnode
if not node:
return hexnode
- return scmutil.shortesthexnodeidprefix(repo, node, minlength)
+ try:
+ return scmutil.shortesthexnodeidprefix(repo, node, minlength)
+ except error.RepoLookupError:
+ return hexnode
@templatefunc('strip(text[, chars])')
def strip(context, mapping, args):
--- a/mercurial/templatekw.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/templatekw.py Wed Jun 06 13:31:24 2018 -0400
@@ -38,34 +38,6 @@
compatlist = templateutil.compatlist
_showcompatlist = templateutil._showcompatlist
-def _showlist(name, values, templ, mapping, plural=None, separator=' '):
- ui = mapping.get('ui')
- if ui:
- ui.deprecwarn("templatekw._showlist() is deprecated, use "
- "templateutil._showcompatlist()", '4.6')
- context = templ # this is actually a template context, not a templater
- return _showcompatlist(context, mapping, name, values, plural, separator)
-
-def showdict(name, data, mapping, plural=None, key='key', value='value',
- fmt=None, separator=' '):
- ui = mapping.get('ui')
- if ui:
- ui.deprecwarn("templatekw.showdict() is deprecated, use "
- "templateutil.compatdict()", '4.6')
- c = [{key: k, value: v} for k, v in data.iteritems()]
- f = _showlist(name, c, mapping['templ'], mapping, plural, separator)
- return hybriddict(data, key=key, value=value, fmt=fmt, gen=f)
-
-def showlist(name, values, mapping, plural=None, element=None, separator=' '):
- ui = mapping.get('ui')
- if ui:
- ui.deprecwarn("templatekw.showlist() is deprecated, use "
- "templateutil.compatlist()", '4.6')
- if not element:
- element = name
- f = _showlist(name, values, mapping['templ'], mapping, plural, separator)
- return hybridlist(values, name=element, gen=f)
-
def getlatesttags(context, mapping, pattern=None):
'''return date, distance and name for the latest tag of rev'''
repo = context.resource(mapping, 'repo')
@@ -139,7 +111,7 @@
for i in fl:
lr = fl.linkrev(i)
renamed = fl.renamed(fl.node(i))
- rcache[fn][lr] = renamed
+ rcache[fn][lr] = renamed and renamed[0]
if lr >= endrev:
break
if rev in rcache[fn]:
@@ -148,7 +120,8 @@
# If linkrev != rev (i.e. rev not found in rcache) fallback to
# filectx logic.
try:
- return repo[rev][fn].renamed()
+ renamed = repo[rev][fn].renamed()
+ return renamed and renamed[0]
except error.LookupError:
return None
@@ -278,6 +251,8 @@
if isinstance(s, encoding.localstr):
# try hard to preserve utf-8 bytes
return encoding.tolocal(encoding.fromlocal(s).strip())
+ elif isinstance(s, encoding.safelocalstr):
+ return encoding.safelocalstr(s.strip())
else:
return s.strip()
@@ -344,7 +319,7 @@
for fn in ctx.files():
rename = getrenamed(fn, ctx.rev())
if rename:
- copies.append((fn, rename[0]))
+ copies.append((fn, rename))
copies = util.sortdict(copies)
return compatdict(context, mapping, 'file_copy', copies,
@@ -392,12 +367,19 @@
return getgraphnode(repo, ctx)
def getgraphnode(repo, ctx):
+ return getgraphnodecurrent(repo, ctx) or getgraphnodesymbol(ctx)
+
+def getgraphnodecurrent(repo, ctx):
wpnodes = repo.dirstate.parents()
if wpnodes[1] == nullid:
wpnodes = wpnodes[:1]
if ctx.node() in wpnodes:
return '@'
- elif ctx.obsolete():
+ else:
+ return ''
+
+def getgraphnodesymbol(ctx):
+ if ctx.obsolete():
return 'x'
elif ctx.isunstable():
return '*'
--- a/mercurial/templater.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/templater.py Wed Jun 06 13:31:24 2018 -0400
@@ -26,15 +26,15 @@
values of any printable types, and will be folded by ``stringify()``
or ``flatten()``.
- BUG: hgweb overloads this type for mappings (i.e. some hgweb keywords
- returns a generator of dicts.)
-
None
sometimes represents an empty value, which can be stringified to ''.
True, False, int, float
can be stringified as such.
+wrappedbytes, wrappedvalue
+ a wrapper for the above printable types.
+
date tuple
a (unixtime, offset) tuple, which produces no meaningful output by itself.
@@ -253,7 +253,8 @@
p = parser.parser(elements)
try:
while pos < stop:
- n = min((tmpl.find(c, pos, stop) for c in sepchars),
+ n = min((tmpl.find(c, pos, stop)
+ for c in pycompat.bytestr(sepchars)),
key=lambda n: (n < 0, n))
if n < 0:
yield ('string', unescape(tmpl[pos:stop]), pos)
--- a/mercurial/templates/gitweb/manifest.tmpl Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/templates/gitweb/manifest.tmpl Wed Jun 06 13:31:24 2018 -0400
@@ -30,13 +30,7 @@
<div class="title">{path|escape} {alltags}</div>
<table cellspacing="0">
-<tr class="parity{upparity}">
-<td style="font-family:monospace">drwxr-xr-x</td>
-<td style="font-family:monospace"></td>
-<td style="font-family:monospace"></td>
-<td><a href="{url|urlescape}file/{symrev}{up|urlescape}{sessionvars%urlparameter}">[up]</a></td>
-<td class="link"> </td>
-</tr>
+{ifeq(path, up, '', updirentry)}
{dentries%direntry}
{fentries%fileentry}
</table>
--- a/mercurial/templates/gitweb/map Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/templates/gitweb/map Wed Jun 06 13:31:24 2018 -0400
@@ -59,6 +59,16 @@
changelogentry = changelogentry.tmpl
changeset = changeset.tmpl
manifest = manifest.tmpl
+updirentry = '
+ <tr class="parity{upparity}">
+ <td style="font-family:monospace">drwxr-xr-x</td>
+ <td style="font-family:monospace"></td>
+ <td style="font-family:monospace"></td>
+ <td>
+ <a href="{url|urlescape}file/{symrev}{up|urlescape}{sessionvars%urlparameter}">[up]</a>
+ </td>
+ <td class="link"> </td>
+ </tr>'
direntry = '
<tr class="parity{parity}">
<td style="font-family:monospace">drwxr-xr-x</td>
--- a/mercurial/templates/monoblue/manifest.tmpl Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/templates/monoblue/manifest.tmpl Wed Jun 06 13:31:24 2018 -0400
@@ -33,13 +33,7 @@
<p class="files">{path|escape} {alltags}</p>
<table>
- <tr class="parity{upparity}">
- <td>drwxr-xr-x</td>
- <td></td>
- <td></td>
- <td><a href="{url|urlescape}file/{symrev}{up|urlescape}{sessionvars%urlparameter}">[up]</a></td>
- <td class="link"> </td>
- </tr>
+ {ifeq(path, up, '', updirentry)}
{dentries%direntry}
{fentries%fileentry}
</table>
--- a/mercurial/templates/monoblue/map Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/templates/monoblue/map Wed Jun 06 13:31:24 2018 -0400
@@ -59,6 +59,16 @@
changelogentry = changelogentry.tmpl
changeset = changeset.tmpl
manifest = manifest.tmpl
+updirentry = '
+ <tr class="parity{upparity}">
+ <td>drwxr-xr-x</td>
+ <td></td>
+ <td></td>
+ <td>
+ <a href="{url|urlescape}file/{symrev}{up|urlescape}{sessionvars%urlparameter}">[up]</a>
+ </td>
+ <td class="link"> </td>
+ </tr>'
direntry = '
<tr class="parity{parity}">
<td>drwxr-xr-x</td>
--- a/mercurial/templates/paper/manifest.tmpl Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/templates/paper/manifest.tmpl Wed Jun 06 13:31:24 2018 -0400
@@ -46,11 +46,7 @@
</tr>
</thead>
<tbody class="stripes2">
-<tr class="fileline">
- <td class="name"><a href="{url|urlescape}file/{symrev}{up|urlescape}{sessionvars%urlparameter}">[up]</a></td>
- <td class="size"></td>
- <td class="permissions">drwxr-xr-x</td>
-</tr>
+{ifeq(path, up, '', updirentry)}
{dentries%direntry}
{fentries%fileentry}
</tbody>
--- a/mercurial/templates/paper/map Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/templates/paper/map Wed Jun 06 13:31:24 2018 -0400
@@ -41,6 +41,15 @@
navgraph = '{before%navgraphentry}{after%navgraphentry}'
filenav = '{before%filenaventry}{after%filenaventry}'
+updirentry = '
+ <tr class="fileline">
+ <td class="name">
+ <a href="{url|urlescape}file/{symrev}{up|urlescape}{sessionvars%urlparameter}">[up]</a>
+ </td>
+ <td class="size"></td>
+ <td class="permissions">drwxr-xr-x</td>
+ </tr>'
+
direntry = '
<tr class="fileline">
<td class="name">
--- a/mercurial/templates/spartan/manifest.tmpl Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/templates/spartan/manifest.tmpl Wed Jun 06 13:31:24 2018 -0400
@@ -17,12 +17,7 @@
<h2><a href="/">Mercurial</a> {pathdef%breadcrumb} / files for changeset <a href="{url|urlescape}rev/{node|short}">{node|short}</a>: {path|escape}</h2>
<table cellpadding="0" cellspacing="0">
-<tr class="parity{upparity}">
- <td><tt>drwxr-xr-x</tt>
- <td>
- <td>
- <td><a href="{url|urlescape}file/{symrev}{up|urlescape}{sessionvars%urlparameter}">[up]</a>
-</tr>
+{ifeq(path, up, '', updirentry)}
{dentries%direntry}
{fentries%fileentry}
</table>
--- a/mercurial/templates/spartan/map Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/templates/spartan/map Wed Jun 06 13:31:24 2018 -0400
@@ -25,23 +25,36 @@
navgraph = '{before%navgraphentry}{after%navgraphentry}'
filenav = '{before%filenaventry}{after%filenaventry}'
+updirentry = '
+ <tr class="parity{upparity}">
+ <td><tt>drwxr-xr-x</tt> </td>
+ <td> </td>
+ <td> </td>
+ <td>
+ <a href="{url|urlescape}file/{symrev}{up|urlescape}{sessionvars%urlparameter}">[up]</a>
+ </td>
+ </tr> '
+
direntry = '
<tr class="parity{parity}">
- <td><tt>drwxr-xr-x</tt>
- <td>
- <td>
+ <td><tt>drwxr-xr-x</tt> </td>
+ <td> </td>
+ <td> </td>
<td>
<a href="{url|urlescape}file/{symrev}{path|urlescape}{sessionvars%urlparameter}">{basename|escape}/</a>
<a href="{url|urlescape}file/{symrev}{path|urlescape}/{emptydirs|urlescape}{sessionvars%urlparameter}">
{emptydirs|urlescape}
- </a>'
+ </a>
+ </td>
+ </tr>'
fileentry = '
<tr class="parity{parity}">
- <td><tt>{permissions|permissions}</tt>
- <td align=right><tt class="date">{date|isodate}</tt>
- <td align=right><tt>{size}</tt>
- <td><a href="{url|urlescape}file/{symrev}/{file|urlescape}{sessionvars%urlparameter}">{basename|escape}</a>'
+ <td><tt>{permissions|permissions}</tt> </td>
+ <td align=right><tt class="date">{date|isodate}</tt> </td>
+ <td align=right><tt>{size}</tt> </td>
+ <td><a href="{url|urlescape}file/{symrev}/{file|urlescape}{sessionvars%urlparameter}">{basename|escape}</a></td>
+ </tr>'
filerevision = filerevision.tmpl
fileannotate = fileannotate.tmpl
--- a/mercurial/templateutil.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/templateutil.py Wed Jun 06 13:31:24 2018 -0400
@@ -66,6 +66,44 @@
A returned value must be serializable by templaterfilters.json().
"""
+class wrappedbytes(wrapped):
+ """Wrapper for byte string"""
+
+ def __init__(self, value):
+ self._value = value
+
+ def itermaps(self, context):
+ raise error.ParseError(_('%r is not iterable of mappings')
+ % pycompat.bytestr(self._value))
+
+ def join(self, context, mapping, sep):
+ return joinitems(pycompat.iterbytestr(self._value), sep)
+
+ def show(self, context, mapping):
+ return self._value
+
+ def tovalue(self, context, mapping):
+ return self._value
+
+class wrappedvalue(wrapped):
+ """Generic wrapper for pure non-list/dict/bytes value"""
+
+ def __init__(self, value):
+ self._value = value
+
+ def itermaps(self, context):
+ raise error.ParseError(_('%r is not iterable of mappings')
+ % self._value)
+
+ def join(self, context, mapping, sep):
+ raise error.ParseError(_('%r is not iterable') % self._value)
+
+ def show(self, context, mapping):
+ return pycompat.bytestr(self._value)
+
+ def tovalue(self, context, mapping):
+ return self._value
+
# stub for representing a date type; may be a real date type that can
# provide a readable string value
class date(object):
@@ -151,12 +189,8 @@
yield self.tomap()
def join(self, context, mapping, sep):
- # TODO: just copies the old behavior where a value was a generator
- # yielding one item, but reconsider about it. join() over a string
- # has no consistent result because a string may be a bytes, or a
- # generator yielding an item, or a generator yielding multiple items.
- # Preserving all of the current behaviors wouldn't make any sense.
- return self.show(context, mapping)
+ w = makewrapped(context, mapping, self._value)
+ return w.join(context, mapping, sep)
def show(self, context, mapping):
# TODO: switch gen to (context, mapping) API?
@@ -285,12 +319,6 @@
return thing
return thing.show(context, mapping)
-def unwrapvalue(context, mapping, thing):
- """Move the inner value object out of the wrapper"""
- if not isinstance(thing, wrapped):
- return thing
- return thing.tovalue(context, mapping)
-
def wraphybridvalue(container, key, value):
"""Wrap an element of hybrid container to be mappable
@@ -453,15 +481,28 @@
func, data = arg
return func(context, mapping, data)
+def evalwrapped(context, mapping, arg):
+ """Evaluate given argument to wrapped object"""
+ thing = evalrawexp(context, mapping, arg)
+ return makewrapped(context, mapping, thing)
+
+def makewrapped(context, mapping, thing):
+ """Lift object to a wrapped type"""
+ if isinstance(thing, wrapped):
+ return thing
+ thing = _unthunk(context, mapping, thing)
+ if isinstance(thing, bytes):
+ return wrappedbytes(thing)
+ return wrappedvalue(thing)
+
def evalfuncarg(context, mapping, arg):
"""Evaluate given argument as value type"""
- return _unwrapvalue(context, mapping, evalrawexp(context, mapping, arg))
+ return unwrapvalue(context, mapping, evalrawexp(context, mapping, arg))
-# TODO: unify this with unwrapvalue() once the bug of templatefunc.join()
-# is fixed. we can't do that right now because join() has to take a generator
-# of byte strings as it is, not a lazy byte string.
-def _unwrapvalue(context, mapping, thing):
- thing = unwrapvalue(context, mapping, thing)
+def unwrapvalue(context, mapping, thing):
+ """Move the inner value object out of the wrapper"""
+ if isinstance(thing, wrapped):
+ return thing.tovalue(context, mapping)
# evalrawexp() may return string, generator of strings or arbitrary object
# such as date tuple, but filter does not want generator.
return _unthunk(context, mapping, thing)
@@ -476,7 +517,8 @@
thing = stringutil.parsebool(data)
else:
thing = func(context, mapping, data)
- thing = unwrapvalue(context, mapping, thing)
+ if isinstance(thing, wrapped):
+ thing = thing.tovalue(context, mapping)
if isinstance(thing, bool):
return thing
# other objects are evaluated as strings, which means 0 is True, but
@@ -490,7 +532,7 @@
return unwrapdate(context, mapping, thing, err)
def unwrapdate(context, mapping, thing, err=None):
- thing = _unwrapvalue(context, mapping, thing)
+ thing = unwrapvalue(context, mapping, thing)
try:
return dateutil.parsedate(thing)
except AttributeError:
@@ -505,7 +547,7 @@
return unwrapinteger(context, mapping, thing, err)
def unwrapinteger(context, mapping, thing, err=None):
- thing = _unwrapvalue(context, mapping, thing)
+ thing = unwrapvalue(context, mapping, thing)
try:
return int(thing)
except (TypeError, ValueError):
@@ -525,7 +567,7 @@
return stringify(context, mapping, thing)
_unwrapfuncbytype = {
- None: _unwrapvalue,
+ None: unwrapvalue,
bytes: stringify,
date: unwrapdate,
int: unwrapinteger,
@@ -601,20 +643,6 @@
return (_("template filter '%s' is not compatible with keyword '%s'")
% (fn, sym))
-def _checkeditermaps(darg, d):
- try:
- for v in d:
- if not isinstance(v, dict):
- raise TypeError
- yield v
- except TypeError:
- sym = findsymbolicname(darg)
- if sym:
- raise error.ParseError(_("keyword '%s' is not iterable of mappings")
- % sym)
- else:
- raise error.ParseError(_("%r is not iterable of mappings") % d)
-
def _iteroverlaymaps(context, origmapping, newmappings):
"""Generate combined mappings from the original mapping and an iterable
of partial mappings to override the original"""
@@ -623,23 +651,14 @@
lm['index'] = i
yield lm
-def _applymap(context, mapping, diter, targ):
- for lm in _iteroverlaymaps(context, mapping, diter):
+def _applymap(context, mapping, d, targ):
+ for lm in _iteroverlaymaps(context, mapping, d.itermaps(context)):
yield evalrawexp(context, lm, targ)
def runmap(context, mapping, data):
darg, targ = data
- d = evalrawexp(context, mapping, darg)
- # TODO: a generator should be rejected because it is a thunk of lazy
- # string, but we can't because hgweb abuses generator as a keyword
- # that returns a list of dicts.
- # TODO: drop _checkeditermaps() and pass 'd' to mappedgenerator so it
- # can be restarted.
- if isinstance(d, wrapped):
- diter = d.itermaps(context)
- else:
- diter = _checkeditermaps(darg, d)
- return mappedgenerator(_applymap, args=(mapping, diter, targ))
+ d = evalwrapped(context, mapping, darg)
+ return mappedgenerator(_applymap, args=(mapping, d, targ))
def runmember(context, mapping, data):
darg, memb = data
--- a/mercurial/ui.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/ui.py Wed Jun 06 13:31:24 2018 -0400
@@ -18,7 +18,6 @@
import socket
import subprocess
import sys
-import tempfile
import traceback
from .i18n import _
@@ -66,8 +65,8 @@
update.check = noconflict
# Show conflicts information in `hg status`
status.verbose = True
-# Skip the bisect state in conflicts information in `hg status`
-status.skipstates = bisect
+# Collapse entire directories that contain only unknown files
+status.terse = u
[diff]
git = 1
@@ -1446,7 +1445,7 @@
rdir = None
if self.configbool('experimental', 'editortmpinhg'):
rdir = repopath
- (fd, name) = tempfile.mkstemp(prefix='hg-' + extra['prefix'] + '-',
+ (fd, name) = pycompat.mkstemp(prefix='hg-' + extra['prefix'] + '-',
suffix=suffix,
dir=rdir)
try:
--- a/mercurial/unionrepo.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/unionrepo.py Wed Jun 06 13:31:24 2018 -0400
@@ -49,7 +49,7 @@
for rev2 in self.revlog2:
rev = self.revlog2.index[rev2]
# rev numbers - in revlog2, very different from self.rev
- _start, _csize, _rsize, base, linkrev, p1rev, p2rev, node = rev
+ _start, _csize, rsize, base, linkrev, p1rev, p2rev, node = rev
flags = _start & 0xFFFF
if linkmapper is None: # link is to same revlog
@@ -69,7 +69,9 @@
p1node = self.revlog2.node(p1rev)
p2node = self.revlog2.node(p2rev)
- e = (flags, None, None, base,
+ # TODO: it's probably wrong to set compressed length to None, but
+ # I have no idea if csize is valid in the base revlog context.
+ e = (flags, None, rsize, base,
link, self.rev(p1node), self.rev(p2node), node)
self.index.insert(-1, e)
self.nodemap[node] = n
--- a/mercurial/upgrade.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/upgrade.py Wed Jun 06 13:31:24 2018 -0400
@@ -8,7 +8,6 @@
from __future__ import absolute_import
import stat
-import tempfile
from .i18n import _
from . import (
@@ -18,6 +17,7 @@
hg,
localrepo,
manifest,
+ pycompat,
revlog,
scmutil,
util,
@@ -657,7 +657,7 @@
ui.write(_('data fully migrated to temporary repository\n'))
- backuppath = tempfile.mkdtemp(prefix='upgradebackup.', dir=srcrepo.path)
+ backuppath = pycompat.mkdtemp(prefix='upgradebackup.', dir=srcrepo.path)
backupvfs = vfsmod.vfs(backuppath)
# Make a backup of requires file first, as it is the first to be modified.
@@ -842,7 +842,7 @@
# data. There are less heavyweight ways to do this, but it is easier
# to create a new repo object than to instantiate all the components
# (like the store) separately.
- tmppath = tempfile.mkdtemp(prefix='upgrade.', dir=repo.path)
+ tmppath = pycompat.mkdtemp(prefix='upgrade.', dir=repo.path)
backuppath = None
try:
ui.write(_('creating temporary repository to stage migrated '
--- a/mercurial/util.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/util.py Wed Jun 06 13:31:24 2018 -0400
@@ -31,7 +31,6 @@
import socket
import stat
import sys
-import tempfile
import time
import traceback
import warnings
@@ -47,7 +46,6 @@
urllibcompat,
)
from .utils import (
- dateutil,
procutil,
stringutil,
)
@@ -60,10 +58,8 @@
b85encode = base85.b85encode
cookielib = pycompat.cookielib
-empty = pycompat.empty
httplib = pycompat.httplib
pickle = pycompat.pickle
-queue = pycompat.queue
safehasattr = pycompat.safehasattr
socketserver = pycompat.socketserver
bytesio = pycompat.bytesio
@@ -1896,7 +1892,7 @@
# work around issue2543 (or testfile may get lost on Samba shares)
f1, f2, fp = None, None, None
try:
- fd, f1 = tempfile.mkstemp(prefix='.%s-' % os.path.basename(testfile),
+ fd, f1 = pycompat.mkstemp(prefix='.%s-' % os.path.basename(testfile),
suffix='1~', dir=os.path.dirname(testfile))
os.close(fd)
f2 = '%s2~' % f1[:-2]
@@ -1942,7 +1938,7 @@
Returns the name of the temporary file.
"""
d, fn = os.path.split(name)
- fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, suffix='~', dir=d)
+ fd, temp = pycompat.mkstemp(prefix='.%s-' % fn, suffix='~', dir=d)
os.close(fd)
# Temporary files are created with mode 0600, which is usually not
# what we want. If the original file already exists, just copy
@@ -2719,7 +2715,7 @@
'query', 'fragment'):
v = getattr(self, a)
if v is not None:
- attrs.append('%s: %r' % (a, v))
+ attrs.append('%s: %r' % (a, pycompat.bytestr(v)))
return '<url %s>' % ', '.join(attrs)
def __bytes__(self):
@@ -2921,6 +2917,7 @@
finally:
elapsed = timer() - start
_timenesting[0] -= indent
+ stderr = procutil.stderr
stderr.write('%s%s: %s\n' %
(' ' * _timenesting[0], func.__name__,
timecount(elapsed)))
@@ -3784,93 +3781,3 @@
if not (byte & 0x80):
return result
shift += 7
-
-###
-# Deprecation warnings for util.py splitting
-###
-
-def _deprecatedfunc(func, version, modname=None):
- def wrapped(*args, **kwargs):
- fn = pycompat.sysbytes(func.__name__)
- mn = modname or pycompat.sysbytes(func.__module__)[len('mercurial.'):]
- msg = "'util.%s' is deprecated, use '%s.%s'" % (fn, mn, fn)
- nouideprecwarn(msg, version, stacklevel=2)
- return func(*args, **kwargs)
- wrapped.__name__ = func.__name__
- return wrapped
-
-defaultdateformats = dateutil.defaultdateformats
-extendeddateformats = dateutil.extendeddateformats
-makedate = _deprecatedfunc(dateutil.makedate, '4.6')
-datestr = _deprecatedfunc(dateutil.datestr, '4.6')
-shortdate = _deprecatedfunc(dateutil.shortdate, '4.6')
-parsetimezone = _deprecatedfunc(dateutil.parsetimezone, '4.6')
-strdate = _deprecatedfunc(dateutil.strdate, '4.6')
-parsedate = _deprecatedfunc(dateutil.parsedate, '4.6')
-matchdate = _deprecatedfunc(dateutil.matchdate, '4.6')
-
-stderr = procutil.stderr
-stdin = procutil.stdin
-stdout = procutil.stdout
-explainexit = _deprecatedfunc(procutil.explainexit, '4.6',
- modname='utils.procutil')
-findexe = _deprecatedfunc(procutil.findexe, '4.6', modname='utils.procutil')
-getuser = _deprecatedfunc(procutil.getuser, '4.6', modname='utils.procutil')
-getpid = _deprecatedfunc(procutil.getpid, '4.6', modname='utils.procutil')
-hidewindow = _deprecatedfunc(procutil.hidewindow, '4.6',
- modname='utils.procutil')
-popen = _deprecatedfunc(procutil.popen, '4.6', modname='utils.procutil')
-quotecommand = _deprecatedfunc(procutil.quotecommand, '4.6',
- modname='utils.procutil')
-readpipe = _deprecatedfunc(procutil.readpipe, '4.6', modname='utils.procutil')
-setbinary = _deprecatedfunc(procutil.setbinary, '4.6', modname='utils.procutil')
-setsignalhandler = _deprecatedfunc(procutil.setsignalhandler, '4.6',
- modname='utils.procutil')
-shellquote = _deprecatedfunc(procutil.shellquote, '4.6',
- modname='utils.procutil')
-shellsplit = _deprecatedfunc(procutil.shellsplit, '4.6',
- modname='utils.procutil')
-spawndetached = _deprecatedfunc(procutil.spawndetached, '4.6',
- modname='utils.procutil')
-sshargs = _deprecatedfunc(procutil.sshargs, '4.6', modname='utils.procutil')
-testpid = _deprecatedfunc(procutil.testpid, '4.6', modname='utils.procutil')
-try:
- setprocname = _deprecatedfunc(procutil.setprocname, '4.6',
- modname='utils.procutil')
-except AttributeError:
- pass
-try:
- unblocksignal = _deprecatedfunc(procutil.unblocksignal, '4.6',
- modname='utils.procutil')
-except AttributeError:
- pass
-closefds = procutil.closefds
-isatty = _deprecatedfunc(procutil.isatty, '4.6')
-popen2 = _deprecatedfunc(procutil.popen2, '4.6')
-popen3 = _deprecatedfunc(procutil.popen3, '4.6')
-popen4 = _deprecatedfunc(procutil.popen4, '4.6')
-pipefilter = _deprecatedfunc(procutil.pipefilter, '4.6')
-tempfilter = _deprecatedfunc(procutil.tempfilter, '4.6')
-filter = _deprecatedfunc(procutil.filter, '4.6')
-mainfrozen = _deprecatedfunc(procutil.mainfrozen, '4.6')
-hgexecutable = _deprecatedfunc(procutil.hgexecutable, '4.6')
-isstdin = _deprecatedfunc(procutil.isstdin, '4.6')
-isstdout = _deprecatedfunc(procutil.isstdout, '4.6')
-shellenviron = _deprecatedfunc(procutil.shellenviron, '4.6')
-system = _deprecatedfunc(procutil.system, '4.6')
-gui = _deprecatedfunc(procutil.gui, '4.6')
-hgcmd = _deprecatedfunc(procutil.hgcmd, '4.6')
-rundetached = _deprecatedfunc(procutil.rundetached, '4.6')
-
-binary = _deprecatedfunc(stringutil.binary, '4.6')
-stringmatcher = _deprecatedfunc(stringutil.stringmatcher, '4.6')
-shortuser = _deprecatedfunc(stringutil.shortuser, '4.6')
-emailuser = _deprecatedfunc(stringutil.emailuser, '4.6')
-email = _deprecatedfunc(stringutil.email, '4.6')
-ellipsis = _deprecatedfunc(stringutil.ellipsis, '4.6')
-escapestr = _deprecatedfunc(stringutil.escapestr, '4.6')
-unescapestr = _deprecatedfunc(stringutil.unescapestr, '4.6')
-forcebytestr = _deprecatedfunc(stringutil.forcebytestr, '4.6')
-uirepr = _deprecatedfunc(stringutil.uirepr, '4.6')
-wrap = _deprecatedfunc(stringutil.wrap, '4.6')
-parsebool = _deprecatedfunc(stringutil.parsebool, '4.6')
--- a/mercurial/utils/cborutil.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/utils/cborutil.py Wed Jun 06 13:31:24 2018 -0400
@@ -140,12 +140,15 @@
yield BREAK
+def _mixedtypesortkey(v):
+ return type(v).__name__, v
+
def streamencodeset(s):
# https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml defines
# semantic tag 258 for finite sets.
yield encodelength(MAJOR_TYPE_SEMANTIC, 258)
- for chunk in streamencodearray(sorted(s)):
+ for chunk in streamencodearray(sorted(s, key=_mixedtypesortkey)):
yield chunk
def streamencodemap(d):
@@ -155,7 +158,8 @@
"""
yield encodelength(MAJOR_TYPE_MAP, len(d))
- for key, value in sorted(d.iteritems()):
+ for key, value in sorted(d.iteritems(),
+ key=lambda x: _mixedtypesortkey(x[0])):
for chunk in streamencode(key):
yield chunk
for chunk in streamencode(value):
--- a/mercurial/utils/procutil.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/utils/procutil.py Wed Jun 06 13:31:24 2018 -0400
@@ -16,7 +16,6 @@
import signal
import subprocess
import sys
-import tempfile
import time
from ..i18n import _
@@ -164,11 +163,11 @@
the temporary files generated.'''
inname, outname = None, None
try:
- infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
+ infd, inname = pycompat.mkstemp(prefix='hg-filter-in-')
fp = os.fdopen(infd, r'wb')
fp.write(s)
fp.close()
- outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
+ outfd, outname = pycompat.mkstemp(prefix='hg-filter-out-')
os.close(outfd)
cmd = cmd.replace('INFILE', inname)
cmd = cmd.replace('OUTFILE', outname)
--- a/mercurial/utils/stringutil.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/utils/stringutil.py Wed Jun 06 13:31:24 2018 -0400
@@ -23,7 +23,7 @@
pycompat,
)
-def pprint(o, bprefix=True):
+def pprint(o, bprefix=False):
"""Pretty print an object."""
if isinstance(o, bytes):
if bprefix:
@@ -40,16 +40,10 @@
'%s: %s' % (pprint(k, bprefix=bprefix),
pprint(v, bprefix=bprefix))
for k, v in sorted(o.items())))
- elif isinstance(o, bool):
- return b'True' if o else b'False'
- elif isinstance(o, int):
- return '%d' % o
- elif isinstance(o, float):
- return '%f' % o
- elif o is None:
- return b'None'
+ elif isinstance(o, tuple):
+ return '(%s)' % (b', '.join(pprint(a, bprefix=bprefix) for a in o))
else:
- raise error.ProgrammingError('do not know how to format %r' % o)
+ return pycompat.byterepr(o)
def binary(s):
"""return true if a string is binary data"""
--- a/mercurial/vfs.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/vfs.py Wed Jun 06 13:31:24 2018 -0400
@@ -11,7 +11,6 @@
import os
import shutil
import stat
-import tempfile
import threading
from .i18n import _
@@ -171,7 +170,7 @@
return os.mkdir(self.join(path))
def mkstemp(self, suffix='', prefix='tmp', dir=None):
- fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
+ fd, name = pycompat.mkstemp(suffix=suffix, prefix=prefix,
dir=self.join(dir))
dname, fname = util.split(name)
if dir:
@@ -568,7 +567,7 @@
ui.debug('starting %d threads for background file closing\n' %
threadcount)
- self._queue = util.queue(maxsize=maxqueue)
+ self._queue = pycompat.queue.Queue(maxsize=maxqueue)
self._running = True
for i in range(threadcount):
@@ -600,7 +599,7 @@
except Exception as e:
# Stash so can re-raise from main thread later.
self._threadexception = e
- except util.empty:
+ except pycompat.queue.Empty:
if not self._running:
break
--- a/mercurial/wireprotoserver.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/wireprotoserver.py Wed Jun 06 13:31:24 2018 -0400
@@ -18,7 +18,6 @@
from . import (
encoding,
error,
- hook,
pycompat,
util,
wireprototypes,
@@ -785,8 +784,7 @@
def __init__(self, ui, repo, logfh=None):
self._ui = ui
self._repo = repo
- self._fin = ui.fin
- self._fout = ui.fout
+ self._fin, self._fout = procutil.protectstdio(ui.fin, ui.fout)
# Log write I/O to stdout and stderr if configured.
if logfh:
@@ -795,15 +793,10 @@
ui.ferr = util.makeloggingfileobject(
logfh, ui.ferr, 'e', logdata=True)
- hook.redirect(True)
- ui.fout = repo.ui.fout = ui.ferr
-
- # Prevent insertion/deletion of CRs
- procutil.setbinary(self._fin)
- procutil.setbinary(self._fout)
-
def serve_forever(self):
self.serveuntil(threading.Event())
+ procutil.restorestdio(self._ui.fin, self._ui.fout,
+ self._fin, self._fout)
sys.exit(0)
def serveuntil(self, ev):
--- a/mercurial/wireprotov1server.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/wireprotov1server.py Wed Jun 06 13:31:24 2018 -0400
@@ -8,7 +8,6 @@
from __future__ import absolute_import
import os
-import tempfile
from .i18n import _
from .node import (
@@ -568,7 +567,7 @@
fp.close()
if tempname:
os.unlink(tempname)
- fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
+ fd, tempname = pycompat.mkstemp(prefix='hg-unbundle-')
repo.ui.debug('redirecting incoming bundle to %s\n' %
tempname)
fp = os.fdopen(fd, pycompat.sysstr('wb+'))
--- a/mercurial/worker.py Wed Jun 06 13:28:49 2018 -0400
+++ b/mercurial/worker.py Wed Jun 06 13:31:24 2018 -0400
@@ -235,7 +235,7 @@
# iteration.
if self._interrupted:
return
- except util.empty:
+ except pycompat.queue.Empty:
break
except Exception as e:
# store the exception such that the main thread can resurface
@@ -262,8 +262,8 @@
return
workers = _numworkers(ui)
- resultqueue = util.queue()
- taskqueue = util.queue()
+ resultqueue = pycompat.queue.Queue()
+ taskqueue = pycompat.queue.Queue()
# partition work to more pieces than workers to minimize the chance
# of uneven distribution of large tasks between the workers
for pargs in partition(args, workers * 20):
--- a/setup.py Wed Jun 06 13:28:49 2018 -0400
+++ b/setup.py Wed Jun 06 13:31:24 2018 -0400
@@ -360,7 +360,7 @@
write_if_changed('mercurial/__version__.py', b''.join([
b'# this file is autogenerated by setup.py\n'
- b'version = "%s"\n' % versionb,
+ b'version = b"%s"\n' % versionb,
]))
try:
@@ -1066,16 +1066,24 @@
package_data=packagedata,
cmdclass=cmdclass,
distclass=hgdist,
- options={'py2exe': {'packages': ['hgdemandimport', 'hgext', 'email',
- # implicitly imported per module policy
- # (cffi wouldn't be used as a frozen exe)
- 'mercurial.cext',
- #'mercurial.cffi',
- 'mercurial.pure']},
- 'bdist_mpkg': {'zipdist': False,
- 'license': 'COPYING',
- 'readme': 'contrib/macosx/Readme.html',
- 'welcome': 'contrib/macosx/Welcome.html',
- },
- },
+ options={
+ 'py2exe': {
+ 'packages': [
+ 'hgdemandimport',
+ 'hgext',
+ 'email',
+ # implicitly imported per module policy
+ # (cffi wouldn't be used as a frozen exe)
+ 'mercurial.cext',
+ #'mercurial.cffi',
+ 'mercurial.pure',
+ ],
+ },
+ 'bdist_mpkg': {
+ 'zipdist': False,
+ 'license': 'COPYING',
+ 'readme': 'contrib/packaging/macosx/Readme.html',
+ 'welcome': 'contrib/packaging/macosx/Welcome.html',
+ },
+ },
**extra)
--- a/tests/hghave.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/hghave.py Wed Jun 06 13:31:24 2018 -0400
@@ -616,7 +616,7 @@
"debian build dependencies (run dpkg-checkbuilddeps in contrib/)")
def has_debdeps():
# just check exit status (ignoring output)
- path = '%s/../contrib/debian/control' % os.environ['TESTDIR']
+ path = '%s/../contrib/packaging/debian/control' % os.environ['TESTDIR']
return matchoutput('dpkg-checkbuilddeps %s' % path, br'')
@check("demandimport", "demandimport enabled")
@@ -709,6 +709,10 @@
return int(mat.group(1)) > 5
return False
+@check("clang-6.0", "clang 6.0 with version suffix (libfuzzer included)")
+def has_clang60():
+ return matchoutput('clang-6.0 --version', b'clang version 6\.')
+
@check("xdiff", "xdiff algorithm")
def has_xdiff():
try:
--- a/tests/killdaemons.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/killdaemons.py Wed Jun 06 13:31:24 2018 -0400
@@ -124,4 +124,4 @@
else:
path = os.environ["DAEMON_PIDS"]
- killdaemons(path)
+ killdaemons(path, remove=True)
--- a/tests/printenv.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/printenv.py Wed Jun 06 13:31:24 2018 -0400
@@ -39,14 +39,15 @@
if k.startswith("HG_") and v]
env.sort()
-out.write("%s hook: " % name)
+out.write(b"%s hook: " % name.encode('ascii'))
if os.name == 'nt':
filter = lambda x: x.replace('\\', '/')
else:
filter = lambda x: x
-vars = ["%s=%s" % (k, filter(v)) for k, v in env]
-out.write(" ".join(vars))
-out.write("\n")
+vars = [b"%s=%s" % (k.encode('ascii'), filter(v).encode('ascii'))
+ for k, v in env]
+out.write(b" ".join(vars))
+out.write(b"\n")
out.close()
sys.exit(exitcode)
--- a/tests/run-tests.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/run-tests.py Wed Jun 06 13:31:24 2018 -0400
@@ -120,7 +120,7 @@
}
class TestRunnerLexer(lexer.RegexLexer):
- testpattern = r'[\w-]+\.(t|py)( \(case [\w-]+\))?'
+ testpattern = r'[\w-]+\.(t|py)(#[\w-]+)?'
tokens = {
'root': [
(r'^Skipped', token.Generic.Skipped, 'skipped'),
@@ -1247,7 +1247,7 @@
self._allcases = parsettestcases(path)
super(TTest, self).__init__(path, *args, **kwds)
if case:
- self.name = '%s (case %s)' % (self.name, _strpath(case))
+ self.name = '%s#%s' % (self.name, _strpath(case))
self.errpath = b'%s.%s.err' % (self.errpath[:-4], case)
self._tmpname += b'-%s' % case
self._have = {}
@@ -2646,16 +2646,31 @@
expanded_args.append(arg)
args = expanded_args
+ testcasepattern = re.compile(r'([\w-]+\.t|py)(#([\w-])+)')
tests = []
for t in args:
+ case = None
+
if not (os.path.basename(t).startswith(b'test-')
and (t.endswith(b'.py') or t.endswith(b'.t'))):
- continue
+
+ m = testcasepattern.match(t)
+ if m is not None:
+ t, _, case = m.groups()
+ else:
+ continue
+
if t.endswith(b'.t'):
# .t file may contain multiple test cases
cases = sorted(parsettestcases(t))
if cases:
- tests += [{'path': t, 'case': c} for c in sorted(cases)]
+ if case is not None and case in cases:
+ tests += [{'path': t, 'case': case}]
+ elif case is not None and case not in cases:
+ # Ignore invalid cases
+ pass
+ else:
+ tests += [{'path': t, 'case': c} for c in sorted(cases)]
else:
tests.append({'path': t})
else:
--- a/tests/test-archive.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-archive.t Wed Jun 06 13:31:24 2018 -0400
@@ -94,7 +94,7 @@
$ echo "archivesubrepos = True" >> .hg/hgrc
$ cp .hg/hgrc .hg/hgrc-base
> test_archtype() {
- > echo "allow_archive = $1" >> .hg/hgrc
+ > echo "allow-archive = $1" >> .hg/hgrc
> test_archtype_run "$@"
> }
> test_archtype_deprecated() {
@@ -293,7 +293,7 @@
body: size=1451, sha1=4c5cf0f574446c44feb7f88f4e0e2a56bd92c352
- $ echo "allow_archive = gz bz2 zip" >> .hg/hgrc
+ $ echo "allow-archive = gz bz2 zip" >> .hg/hgrc
$ hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log
$ cat hg.pid >> $DAEMON_PIDS
--- a/tests/test-bad-pull.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-bad-pull.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,4 +1,4 @@
-#require serve killdaemons
+#require serve
$ hg clone http://localhost:$HGPORT/ copy
abort: * (glob)
--- a/tests/test-blackbox.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-blackbox.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,13 +1,29 @@
setup
+
+ $ cat > myextension.py <<EOF
+ > from mercurial import error, registrar
+ > cmdtable = {}
+ > command = registrar.command(cmdtable)
+ > @command(b'crash', [], b'hg crash')
+ > def crash(ui, *args, **kwargs):
+ > raise Exception("oops")
+ > @command(b'abort', [], b'hg abort')
+ > def abort(ui, *args, **kwargs):
+ > raise error.Abort(b"oops")
+ > EOF
+ $ abspath=`pwd`/myextension.py
+
$ cat >> $HGRCPATH <<EOF
> [extensions]
> blackbox=
> mock=$TESTDIR/mockblackbox.py
> mq=
+ > myextension=$TESTTMP/myextension.py
> [alias]
> confuse = log --limit 3
> so-confusing = confuse --style compact
> EOF
+
$ hg init blackboxtest
$ cd blackboxtest
@@ -21,6 +37,32 @@
1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> add a exited 0 after * seconds (glob)
1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000+ (5000)> blackbox --config *blackbox.dirty=True* (glob)
+failure exit code
+ $ rm ./.hg/blackbox.log
+ $ hg add non-existent
+ non-existent: $ENOENT$
+ [1]
+ $ hg blackbox
+ 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> add non-existent
+ 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> add non-existent exited 1 after * seconds (glob)
+ 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> blackbox
+
+abort exit code
+ $ rm ./.hg/blackbox.log
+ $ hg abort 2> /dev/null
+ [255]
+ $ hg blackbox -l 2
+ 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> abort exited 255 after * seconds (glob)
+ 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> blackbox -l 2
+
+unhandled exception
+ $ rm ./.hg/blackbox.log
+ $ hg crash 2> /dev/null
+ [1]
+ $ hg blackbox -l 2
+ 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> crash exited 1 after * seconds (glob)
+ 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> blackbox -l 2
+
alias expansion is logged
$ rm ./.hg/blackbox.log
$ hg confuse
@@ -206,7 +248,7 @@
committing changelog
updating the branch cache
committed changeset 0:0e46349438790c460c5c9f7546bfcd39b267bbd2
- result: None
+ result: 0
running: --debug commit -m commit2 -d 2000-01-02 foo
committing files:
foo
@@ -214,7 +256,7 @@
committing changelog
updating the branch cache
committed changeset 1:45589e459b2edfbf3dbde7e01f611d2c1e7453d7
- result: None
+ result: 0
running: --debug log -r 0
changeset: 0:0e46349438790c460c5c9f7546bfcd39b267bbd2
phase: draft
@@ -229,7 +271,7 @@
commit1
- result: None
+ result: 0
running: --debug log -r tip
changeset: 1:45589e459b2edfbf3dbde7e01f611d2c1e7453d7
tag: tip
@@ -245,7 +287,7 @@
commit2
- result: None
+ result: 0
$ hg blackbox
1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> updating the branch cache
1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> updated served branch cache in * seconds (glob)
--- a/tests/test-bookmarks.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-bookmarks.t Wed Jun 06 13:31:24 2018 -0400
@@ -68,6 +68,9 @@
X 0:f7b1eb17ad24
* X2 0:f7b1eb17ad24
Y -1:000000000000
+ $ hg log -T '{bookmarks % "{rev} {bookmark}\n"}'
+ 0 X
+ 0 X2
$ echo b > b
$ hg add b
@@ -299,6 +302,11 @@
Y 2:db815d6d32e6
Z 0:f7b1eb17ad24
* x y 2:db815d6d32e6
+ $ hg log -T '{bookmarks % "{rev} {bookmark}\n"}'
+ 2 Y
+ 2 x y
+ 1 X2
+ 0 Z
look up stripped bookmark name
@@ -445,6 +453,11 @@
Y 2:db815d6d32e6
* Z 2:db815d6d32e6
x y 2:db815d6d32e6
+ $ hg log -T '{bookmarks % "{rev} {bookmark}\n"}'
+ 2 Y
+ 2 Z
+ 2 x y
+ 1 X2
revision but no bookmark name
--- a/tests/test-bugzilla.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-bugzilla.t Wed Jun 06 13:31:24 2018 -0400
@@ -8,15 +8,15 @@
> configtable = {}
> configitem = registrar.configitem(configtable)
>
- > configitem('bugzilla', 'mocklog',
+ > configitem(b'bugzilla', b'mocklog',
> default=None,
> )
> def extsetup(ui):
- > bugzilla = extensions.find('bugzilla')
+ > bugzilla = extensions.find(b'bugzilla')
> class bzmock(bugzilla.bzaccess):
> def __init__(self, ui):
> super(bzmock, self).__init__(ui)
- > self._logfile = ui.config('bugzilla', 'mocklog')
+ > self._logfile = ui.config(b'bugzilla', b'mocklog')
> def updatebug(self, bugid, newstate, text, committer):
> with open(self._logfile, 'a') as f:
> f.write('update bugid=%r, newstate=%r, committer=%r\n'
@@ -26,7 +26,7 @@
> with open(self._logfile, 'a') as f:
> f.write('notify bugs=%r, committer=%r\n'
> % (bugs, committer))
- > bugzilla.bugzilla._versions['mock'] = bzmock
+ > bugzilla.bugzilla._versions[b'mock'] = bzmock
> EOF
set up mock repository:
--- a/tests/test-bundle-r.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-bundle-r.t Wed Jun 06 13:31:24 2018 -0400
@@ -156,6 +156,7 @@
adding file changes
added 4 changesets with 2 changes to 3 files (+1 heads)
new changesets c70afb1ee985:faa2e4234c7a
+ 1 changesets became public
(run 'hg heads' to see heads, 'hg merge' to merge)
$ hg verify
checking changesets
--- a/tests/test-bundle2-exchange.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-bundle2-exchange.t Wed Jun 06 13:31:24 2018 -0400
@@ -150,6 +150,7 @@
pulling from $TESTTMP/main
no changes found
pre-close-tip:24b6387c8c8c public
+ 1 changesets became public
postclose-tip:24b6387c8c8c public
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=0 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
--- a/tests/test-bundle2-remote-changegroup.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-bundle2-remote-changegroup.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,3 @@
-#require killdaemons
-
#testcases sshv1 sshv2
#if sshv2
--- a/tests/test-cbor.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-cbor.py Wed Jun 06 13:31:24 2018 -0400
@@ -69,7 +69,7 @@
dest = b''.join(cborutil.streamencodeindefinitebytestring(
source, chunksize=42))
- self.assertEqual(cbor.loads(dest), b''.join(source))
+ self.assertEqual(cbor.loads(dest), source)
def testreadtoiter(self):
source = io.BytesIO(b'\x5f\x44\xaa\xbb\xcc\xdd\x43\xee\xff\x99\xff')
--- a/tests/test-check-interfaces.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-check-interfaces.py Wed Jun 06 13:31:24 2018 -0400
@@ -25,6 +25,7 @@
filelog,
httppeer,
localrepo,
+ pycompat,
repository,
sshpeer,
statichttprepo,
@@ -37,7 +38,8 @@
wireprotov2server,
)
-rootdir = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
+rootdir = pycompat.fsencode(
+ os.path.normpath(os.path.join(os.path.dirname(__file__), '..')))
def checkzobject(o, allowextra=False):
"""Verify an object with a zope interface."""
@@ -106,7 +108,7 @@
httppeer.httpv2peer)
ziverify.verifyClass(repository.ipeercapabilities,
httppeer.httpv2peer)
- checkzobject(httppeer.httpv2peer(None, '', None, None, None, None))
+ checkzobject(httppeer.httpv2peer(None, b'', b'', None, None, None))
ziverify.verifyClass(repository.ipeerbase,
localrepo.localpeer)
@@ -121,11 +123,11 @@
checkzobject(wireprotov1peer.peerexecutor(None))
ziverify.verifyClass(repository.ipeerbase, sshpeer.sshv1peer)
- checkzobject(sshpeer.sshv1peer(ui, 'ssh://localhost/foo', None, dummypipe(),
+ checkzobject(sshpeer.sshv1peer(ui, b'ssh://localhost/foo', b'', dummypipe(),
dummypipe(), None, None))
ziverify.verifyClass(repository.ipeerbase, sshpeer.sshv2peer)
- checkzobject(sshpeer.sshv2peer(ui, 'ssh://localhost/foo', None, dummypipe(),
+ checkzobject(sshpeer.sshv2peer(ui, b'ssh://localhost/foo', b'', dummypipe(),
dummypipe(), None, None))
ziverify.verifyClass(repository.ipeerbase, bundlerepo.bundlepeer)
@@ -163,8 +165,8 @@
ziverify.verifyClass(repository.ifilestorage, filelog.filelog)
- vfs = vfsmod.vfs('.')
- fl = filelog.filelog(vfs, 'dummy.i')
+ vfs = vfsmod.vfs(b'.')
+ fl = filelog.filelog(vfs, b'dummy.i')
checkzobject(fl, allowextra=True)
main()
--- a/tests/test-clone-uncompressed.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-clone-uncompressed.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,4 +1,4 @@
-#require serve no-reposimplestore
+#require serve no-reposimplestore no-chg
#testcases stream-legacy stream-bundle2
--- a/tests/test-clonebundles.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-clonebundles.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,4 +1,4 @@
-#require no-reposimplestore
+#require no-reposimplestore no-chg
Set up a server
@@ -129,6 +129,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files
new changesets aaff8d2ffbbf
+ 1 changesets became public
Incremental pull doesn't fetch bundle
@@ -201,6 +202,7 @@
finished applying clone bundle
searching for changes
no changes found
+ 2 changesets became public
Feature works over SSH
@@ -213,6 +215,7 @@
finished applying clone bundle
searching for changes
no changes found
+ 2 changesets became public
Entry with unknown BUNDLESPEC is filtered and not used
@@ -232,6 +235,7 @@
finished applying clone bundle
searching for changes
no changes found
+ 2 changesets became public
Automatic fallback when all entries are filtered
@@ -269,6 +273,7 @@
finished applying clone bundle
searching for changes
no changes found
+ 2 changesets became public
#else
Python <2.7.9 will filter SNI URLs
@@ -373,6 +378,7 @@
finished applying clone bundle
searching for changes
no changes found
+ 2 changesets became public
Preferring bz2 type will download first entry of that type
@@ -385,6 +391,7 @@
finished applying clone bundle
searching for changes
no changes found
+ 2 changesets became public
Preferring multiple values of an option works
@@ -397,6 +404,7 @@
finished applying clone bundle
searching for changes
no changes found
+ 2 changesets became public
Sorting multiple values should get us back to original first entry
@@ -409,6 +417,7 @@
finished applying clone bundle
searching for changes
no changes found
+ 2 changesets became public
Preferring multiple attributes has correct order
@@ -421,6 +430,7 @@
finished applying clone bundle
searching for changes
no changes found
+ 2 changesets became public
Test where attribute is missing from some entries
@@ -440,6 +450,7 @@
finished applying clone bundle
searching for changes
no changes found
+ 2 changesets became public
Test interaction between clone bundles and --stream
@@ -545,3 +556,4 @@
finished applying clone bundle
searching for changes
no changes found
+ 2 changesets became public
--- a/tests/test-command-template.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-command-template.t Wed Jun 06 13:31:24 2018 -0400
@@ -3212,7 +3212,7 @@
$ hg log -R latesttag -r tip -T '{rev % "a"}\n'
- hg: parse error: keyword 'rev' is not iterable of mappings
+ hg: parse error: 11 is not iterable of mappings
[255]
$ hg log -R latesttag -r tip -T '{get(extras, "unknown") % "a"}\n'
hg: parse error: None is not iterable of mappings
@@ -3242,12 +3242,23 @@
$ hg log -R latesttag -l1 -T '{max(revset("0:9")) % "{rev}:{node|short}\n"}'
9:fbc7cd862e9c
-Test manifest/get() can be join()-ed as before, though it's silly:
-
- $ hg log -R latesttag -r tip -T '{join(manifest, "")}\n'
- 11:2bc6e9006ce2
- $ hg log -R latesttag -r tip -T '{join(get(extras, "branch"), "")}\n'
- default
+Test manifest/get() can be join()-ed as string, though it's silly:
+
+ $ hg log -R latesttag -r tip -T '{join(manifest, ".")}\n'
+ 1.1.:.2.b.c.6.e.9.0.0.6.c.e.2
+ $ hg log -R latesttag -r tip -T '{join(get(extras, "branch"), ".")}\n'
+ d.e.f.a.u.l.t
+
+Test join() over string
+
+ $ hg log -R latesttag -r tip -T '{join(rev|stringify, ".")}\n'
+ 1.1
+
+Test join() over uniterable
+
+ $ hg log -R latesttag -r tip -T '{join(rev, "")}\n'
+ hg: parse error: 11 is not iterable
+ [255]
Test min/max of integers
@@ -3916,6 +3927,15 @@
$ hg log --template '{shortest("not a hex string, but it'\''s 40 bytes long")}\n' -l1
not a hex string, but it's 40 bytes long
+ $ hg log --template '{shortest("ffffffffffffffffffffffffffffffffffffffff")}\n' -l1
+ ffff
+
+ $ hg log --template '{shortest("fffffff")}\n' -l1
+ ffff
+
+ $ hg log --template '{shortest("ff")}\n' -l1
+ ffff
+
$ cd ..
Test shortest(node) with the repo having short hash collision:
@@ -4682,6 +4702,13 @@
$ HGENCODING=ascii hg log -T "{desc|json}\n" -r0
"non-ascii branch: \u00e9"
+json filter should take input as utf-8 if it was converted from utf-8:
+
+ $ HGENCODING=latin-1 hg log -T "{branch|json}\n" -r0
+ "\u00e9"
+ $ HGENCODING=latin-1 hg log -T "{desc|json}\n" -r0
+ "non-ascii branch: \u00e9"
+
json filter takes input as utf-8b:
$ HGENCODING=ascii hg log -T "{'`cat utf-8`'|json}\n" -l1
--- a/tests/test-commit-interactive-curses.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-commit-interactive-curses.t Wed Jun 06 13:31:24 2018 -0400
@@ -68,7 +68,7 @@
Committing only one file
$ echo "a" >> a
- >>> open('b', 'wb').write("1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
+ >>> open('b', 'wb').write(b"1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n") and None
$ hg add b
$ cat <<EOF >testModeCommands
> TOGGLE
--- a/tests/test-completion.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-completion.t Wed Jun 06 13:31:24 2018 -0400
@@ -191,6 +191,7 @@
--pid-file
--port
--prefix
+ --print-url
--profile
--quiet
--repository
@@ -231,7 +232,7 @@
clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
commit: addremove, close-branch, amend, secret, edit, interactive, include, exclude, message, logfile, date, user, subrepos
diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
- export: output, switch-parent, rev, text, git, binary, nodates, template
+ export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
forget: interactive, include, exclude, dry-run
init: ssh, remotecmd, insecure
log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
@@ -239,7 +240,7 @@
pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
push: force, rev, bookmark, branch, new-branch, pushvars, ssh, remotecmd, insecure
remove: after, force, subrepos, include, exclude, dry-run
- serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, subrepos
+ serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos
status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
summary: remote
update: clean, check, merge, date, rev, tool
--- a/tests/test-context-metadata.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-context-metadata.t Wed Jun 06 13:31:24 2018 -0400
@@ -23,7 +23,7 @@
> old = repo[b'.']
> kwargs = dict(s.split(b'=', 1) for s in arg.split(b';'))
> if 'parents' in kwargs:
- > kwargs[b'parents'] = kwargs[b'parents'].split(b',')
+ > kwargs[b'parents'] = map(int, kwargs[b'parents'].split(b','))
> new = context.metadataonlyctx(repo, old,
> **pycompat.strkwargs(kwargs))
> new.commit()
--- a/tests/test-context.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-context.py Wed Jun 06 13:31:24 2018 -0400
@@ -1,6 +1,7 @@
from __future__ import absolute_import, print_function
import os
import stat
+import sys
from mercurial.node import hex
from mercurial import (
context,
@@ -10,9 +11,24 @@
ui as uimod,
)
+print_ = print
+def print(*args, **kwargs):
+ """print() wrapper that flushes stdout buffers to avoid py3 buffer issues
+
+ We could also just write directly to sys.stdout.buffer the way the
+ ui object will, but this was easier for porting the test.
+ """
+ print_(*args, **kwargs)
+ sys.stdout.flush()
+
+def printb(data, end=b'\n'):
+ out = getattr(sys.stdout, 'buffer', sys.stdout)
+ out.write(data + end)
+ out.flush()
+
u = uimod.ui.load()
-repo = hg.repository(u, 'test1', create=1)
+repo = hg.repository(u, b'test1', create=1)
os.chdir('test1')
# create 'foo' with fixed time stamp
@@ -22,10 +38,10 @@
os.utime('foo', (1000, 1000))
# add+commit 'foo'
-repo[None].add(['foo'])
-repo.commit(text='commit1', date="0 0")
+repo[None].add([b'foo'])
+repo.commit(text=b'commit1', date=b"0 0")
-d = repo[None]['foo'].date()
+d = repo[None][b'foo'].date()
if os.name == 'nt':
d = d[:2]
print("workingfilectx.date = (%d, %d)" % d)
@@ -33,27 +49,28 @@
# test memctx with non-ASCII commit message
def filectxfn(repo, memctx, path):
- return context.memfilectx(repo, memctx, "foo", "")
+ return context.memfilectx(repo, memctx, b"foo", b"")
-ctx = context.memctx(repo, ['tip', None],
- encoding.tolocal("Gr\xc3\xbcezi!"),
- ["foo"], filectxfn)
+ctx = context.memctx(repo, [b'tip', None],
+ encoding.tolocal(b"Gr\xc3\xbcezi!"),
+ [b"foo"], filectxfn)
ctx.commit()
for enc in "ASCII", "Latin-1", "UTF-8":
encoding.encoding = enc
- print("%-8s: %s" % (enc, repo["tip"].description()))
+ printb(b"%-8s: %s" % (enc.encode('ascii'), repo[b"tip"].description()))
# test performing a status
def getfilectx(repo, memctx, f):
fctx = memctx.parents()[0][f]
data, flags = fctx.data(), fctx.flags()
- if f == 'foo':
- data += 'bar\n'
- return context.memfilectx(repo, memctx, f, data, 'l' in flags, 'x' in flags)
+ if f == b'foo':
+ data += b'bar\n'
+ return context.memfilectx(
+ repo, memctx, f, data, b'l' in flags, b'x' in flags)
ctxa = repo[0]
-ctxb = context.memctx(repo, [ctxa.node(), None], "test diff", ["foo"],
+ctxb = context.memctx(repo, [ctxa.node(), None], b"test diff", [b"foo"],
getfilectx, ctxa.user(), ctxa.date())
print(ctxb.status(ctxa))
@@ -61,26 +78,26 @@
# test performing a diff on a memctx
for d in ctxb.diff(ctxa, git=True):
- print(d, end='')
+ printb(d, end=b'')
# test safeness and correctness of "ctx.status()"
print('= checking context.status():')
# ancestor "wcctx ~ 2"
-actx2 = repo['.']
+actx2 = repo[b'.']
-repo.wwrite('bar-m', 'bar-m\n', '')
-repo.wwrite('bar-r', 'bar-r\n', '')
-repo[None].add(['bar-m', 'bar-r'])
-repo.commit(text='add bar-m, bar-r', date="0 0")
+repo.wwrite(b'bar-m', b'bar-m\n', b'')
+repo.wwrite(b'bar-r', b'bar-r\n', b'')
+repo[None].add([b'bar-m', b'bar-r'])
+repo.commit(text=b'add bar-m, bar-r', date=b"0 0")
# ancestor "wcctx ~ 1"
-actx1 = repo['.']
+actx1 = repo[b'.']
-repo.wwrite('bar-m', 'bar-m bar-m\n', '')
-repo.wwrite('bar-a', 'bar-a\n', '')
-repo[None].add(['bar-a'])
-repo[None].forget(['bar-r'])
+repo.wwrite(b'bar-m', b'bar-m bar-m\n', b'')
+repo.wwrite(b'bar-a', b'bar-a\n', b'')
+repo[None].add([b'bar-a'])
+repo[None].forget([b'bar-r'])
# status at this point:
# M bar-m
@@ -97,10 +114,10 @@
print('=== with "pattern match":')
print(actx1.status(other=wctx,
- match=scmutil.matchfiles(repo, ['bar-m', 'foo'])))
+ match=scmutil.matchfiles(repo, [b'bar-m', b'foo'])))
print('wctx._status=%s' % (str(wctx._status)))
print(actx2.status(other=wctx,
- match=scmutil.matchfiles(repo, ['bar-m', 'foo'])))
+ match=scmutil.matchfiles(repo, [b'bar-m', b'foo'])))
print('wctx._status=%s' % (str(wctx._status)))
print('=== with "always match" and "listclean=True":')
@@ -112,11 +129,11 @@
print("== checking workingcommitctx.status:")
wcctx = context.workingcommitctx(repo,
- scmutil.status(['bar-m'],
- ['bar-a'],
+ scmutil.status([b'bar-m'],
+ [b'bar-a'],
[],
[], [], [], []),
- text='', date='0 0')
+ text=b'', date=b'0 0')
print('wcctx._status=%s' % (str(wcctx._status)))
print('=== with "always match":')
@@ -133,19 +150,19 @@
print('=== with "pattern match":')
print(actx1.status(other=wcctx,
- match=scmutil.matchfiles(repo, ['bar-m', 'foo'])))
+ match=scmutil.matchfiles(repo, [b'bar-m', b'foo'])))
print('wcctx._status=%s' % (str(wcctx._status)))
print(actx2.status(other=wcctx,
- match=scmutil.matchfiles(repo, ['bar-m', 'foo'])))
+ match=scmutil.matchfiles(repo, [b'bar-m', b'foo'])))
print('wcctx._status=%s' % (str(wcctx._status)))
print('=== with "pattern match" and "listclean=True":')
print(actx1.status(other=wcctx,
- match=scmutil.matchfiles(repo, ['bar-r', 'foo']),
+ match=scmutil.matchfiles(repo, [b'bar-r', b'foo']),
listclean=True))
print('wcctx._status=%s' % (str(wcctx._status)))
print(actx2.status(other=wcctx,
- match=scmutil.matchfiles(repo, ['bar-r', 'foo']),
+ match=scmutil.matchfiles(repo, [b'bar-r', b'foo']),
listclean=True))
print('wcctx._status=%s' % (str(wcctx._status)))
@@ -154,7 +171,7 @@
# test manifestlog being changed
print('== commit with manifestlog invalidated')
-repo = hg.repository(u, 'test2', create=1)
+repo = hg.repository(u, b'test2', create=1)
os.chdir('test2')
# make some commits
@@ -166,12 +183,12 @@
date=(0, 0))
ctx.p1().manifest() # side effect: cache manifestctx
n = repo.commitctx(ctx)
- print('commit %s: %s' % (i, hex(n)))
+ printb(b'commit %s: %s' % (i, hex(n)))
# touch 00manifest.i mtime so storecache could expire.
# repo.__dict__['manifestlog'] is deleted by transaction releasefn.
- st = repo.svfs.stat('00manifest.i')
- repo.svfs.utime('00manifest.i',
+ st = repo.svfs.stat(b'00manifest.i')
+ repo.svfs.utime(b'00manifest.i',
(st[stat.ST_MTIME] + 1, st[stat.ST_MTIME] + 1))
# read the file just committed
@@ -181,11 +198,11 @@
except Exception as ex:
print('cannot read data: %r' % ex)
-with repo.wlock(), repo.lock(), repo.transaction('test'):
+with repo.wlock(), repo.lock(), repo.transaction(b'test'):
with open(b'4', 'wb') as f:
f.write(b'4')
- repo.dirstate.normal('4')
- repo.commit('4')
+ repo.dirstate.normal(b'4')
+ repo.commit(b'4')
revsbefore = len(repo.changelog)
repo.invalidate(clearfilecache=True)
revsafter = len(repo.changelog)
--- a/tests/test-convert-cvs.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-convert-cvs.t Wed Jun 06 13:31:24 2018 -0400
@@ -12,10 +12,10 @@
$ echo "convert = " >> $HGRCPATH
$ cat > cvshooks.py <<EOF
> def cvslog(ui,repo,hooktype,log):
- > ui.write('%s hook: %d entries\n' % (hooktype,len(log)))
+ > ui.write(b'%s hook: %d entries\n' % (hooktype,len(log)))
>
> def cvschangesets(ui,repo,hooktype,changesets):
- > ui.write('%s hook: %d changesets\n' % (hooktype,len(changesets)))
+ > ui.write(b'%s hook: %d changesets\n' % (hooktype,len(changesets)))
> EOF
$ hookpath=`pwd`
$ cat <<EOF >> $HGRCPATH
@@ -522,8 +522,8 @@
$ mkdir -p cvsrepo/transcoding
$ python <<EOF
- > fp = open('cvsrepo/transcoding/file,v', 'w')
- > fp.write(('''
+ > fp = open('cvsrepo/transcoding/file,v', 'wb')
+ > fp.write((b'''
> head 1.4;
> access;
> symbols
@@ -570,7 +570,7 @@
>
> 1.4
> log
- > @''' + u'\u3042'.encode('cp932') + ''' (cp932)
+ > @''' + u'\u3042'.encode('cp932') + b''' (cp932)
> @
> text
> @1
@@ -582,7 +582,7 @@
>
> 1.3
> log
- > @''' + u'\u3042'.encode('euc-jp') + ''' (euc-jp)
+ > @''' + u'\u3042'.encode('euc-jp') + b''' (euc-jp)
> @
> text
> @d4 1
@@ -591,7 +591,7 @@
>
> 1.2
> log
- > @''' + u'\u3042'.encode('utf-8') + ''' (utf-8)
+ > @''' + u'\u3042'.encode('utf-8') + b''' (utf-8)
> @
> text
> @d3 1
--- a/tests/test-convert.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-convert.t Wed Jun 06 13:31:24 2018 -0400
@@ -419,6 +419,7 @@
pulling from ../a
searching for changes
no changes found
+ 5 changesets became public
conversion to existing file should fail
--- a/tests/test-custom-filters.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-custom-filters.t Wed Jun 06 13:31:24 2018 -0400
@@ -12,15 +12,15 @@
$ cat > prefix.py <<EOF
> from mercurial import error
> def stripprefix(s, cmd, filename, **kwargs):
- > header = '%s\n' % cmd
+ > header = b'%s\n' % cmd
> if s[:len(header)] != header:
- > raise error.Abort('missing header "%s" in %s' % (cmd, filename))
+ > raise error.Abort(b'missing header "%s" in %s' % (cmd, filename))
> return s[len(header):]
> def insertprefix(s, cmd):
- > return '%s\n%s' % (cmd, s)
+ > return b'%s\n%s' % (cmd, s)
> def reposetup(ui, repo):
- > repo.adddatafilter('stripprefix:', stripprefix)
- > repo.adddatafilter('insertprefix:', insertprefix)
+ > repo.adddatafilter(b'stripprefix:', stripprefix)
+ > repo.adddatafilter(b'insertprefix:', insertprefix)
> EOF
$ cat > .hgignore <<EOF
--- a/tests/test-demandimport.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-demandimport.py Wed Jun 06 13:31:24 2018 -0400
@@ -58,12 +58,12 @@
print("os.system =", f(os.system))
print("os =", f(os))
-from mercurial import util
+from mercurial.utils import procutil
-print("util =", f(util))
-print("util.system =", f(util.system))
-print("util =", f(util))
-print("util.system =", f(util.system))
+print("procutil =", f(procutil))
+print("procutil.system =", f(procutil.system))
+print("procutil =", f(procutil))
+print("procutil.system =", f(procutil.system))
from mercurial import hgweb
print("hgweb =", f(hgweb))
@@ -100,6 +100,8 @@
print('contextlib.unknownattr = ImportError: %s'
% rsub(r"'", '', str(inst)))
+from mercurial import util
+
# Unlike the import statement, __import__() function should not raise
# ImportError even if fromlist has an unknown item
# (see Python/import.c:import_module_level() and ensure_fromlist())
--- a/tests/test-demandimport.py.out Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-demandimport.py.out Wed Jun 06 13:31:24 2018 -0400
@@ -7,10 +7,10 @@
os = <unloaded module 'os'>
os.system = <built-in function system>
os = <module 'os' from '?'>
-util = <unloaded module 'util'>
-util.system = <function system at 0x?>
-util = <module 'mercurial.util' from '?'>
-util.system = <function system at 0x?>
+procutil = <unloaded module 'procutil'>
+procutil.system = <function system at 0x?>
+procutil = <module 'mercurial.utils.procutil' from '?'>
+procutil.system = <function system at 0x?>
hgweb = <unloaded module 'hgweb'>
hgweb_mod = <unloaded module 'hgweb_mod'>
hgweb = <module 'mercurial.hgweb' from '?'>
--- a/tests/test-directaccess.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-directaccess.t Wed Jun 06 13:31:24 2018 -0400
@@ -156,9 +156,9 @@
`hg update`
$ hg up 28ad74
- updating to a hidden changeset 28ad74487de9
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 28ad74487de9
(hidden revision '28ad74487de9' was rewritten as: 2443a0e66469)
- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg up 3
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-dispatch.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-dispatch.py Wed Jun 06 13:31:24 2018 -0400
@@ -1,18 +1,24 @@
from __future__ import absolute_import, print_function
import os
+import sys
from mercurial import (
dispatch,
)
+def printb(data, end=b'\n'):
+ out = getattr(sys.stdout, 'buffer', sys.stdout)
+ out.write(data + end)
+ out.flush()
+
def testdispatch(cmd):
"""Simple wrapper around dispatch.dispatch()
Prints command and result value, but does not handle quoting.
"""
- print(b"running: %s" % (cmd,))
+ printb(b"running: %s" % (cmd,))
req = dispatch.request(cmd.split())
result = dispatch.dispatch(req)
- print(b"result: %r" % (result,))
+ printb(b"result: %r" % (result,))
testdispatch(b"init test1")
os.chdir('test1')
--- a/tests/test-dispatch.py.out Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-dispatch.py.out Wed Jun 06 13:31:24 2018 -0400
@@ -1,18 +1,18 @@
running: init test1
-result: None
+result: 0
running: add foo
result: 0
running: commit -m commit1 -d 2000-01-01 foo
-result: None
+result: 0
running: commit -m commit2 -d 2000-01-02 foo
-result: None
+result: 0
running: log -r 0
changeset: 0:0e4634943879
user: test
date: Sat Jan 01 00:00:00 2000 +0000
summary: commit1
-result: None
+result: 0
running: log -r tip
changeset: 1:45589e459b2e
tag: tip
@@ -20,4 +20,4 @@
date: Sun Jan 02 00:00:00 2000 +0000
summary: commit2
-result: None
+result: 0
--- a/tests/test-encoding-func.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-encoding-func.py Wed Jun 06 13:31:24 2018 -0400
@@ -35,11 +35,46 @@
self.assertTrue(s is encoding.fromlocal(s))
class Utf8bEncodingTest(unittest.TestCase):
+ def setUp(self):
+ self.origencoding = encoding.encoding
+
+ def tearDown(self):
+ encoding.encoding = self.origencoding
+
def testasciifastpath(self):
s = b'\0' * 100
self.assertTrue(s is encoding.toutf8b(s))
self.assertTrue(s is encoding.fromutf8b(s))
+ def testlossylatin(self):
+ encoding.encoding = b'ascii'
+ s = u'\xc0'.encode('utf-8')
+ l = encoding.tolocal(s)
+ self.assertEqual(l, b'?') # lossy
+ self.assertEqual(s, encoding.toutf8b(l)) # utf8 sequence preserved
+
+ def testlosslesslatin(self):
+ encoding.encoding = b'latin-1'
+ s = u'\xc0'.encode('utf-8')
+ l = encoding.tolocal(s)
+ self.assertEqual(l, b'\xc0') # lossless
+ self.assertEqual(s, encoding.toutf8b(l)) # convert back to utf-8
+
+ def testlossy0xed(self):
+ encoding.encoding = b'euc-kr' # U+Dxxx Hangul
+ s = u'\ud1bc\xc0'.encode('utf-8')
+ l = encoding.tolocal(s)
+ self.assertIn(b'\xed', l)
+ self.assertTrue(l.endswith(b'?')) # lossy
+ self.assertEqual(s, encoding.toutf8b(l)) # utf8 sequence preserved
+
+ def testlossless0xed(self):
+ encoding.encoding = b'euc-kr' # U+Dxxx Hangul
+ s = u'\ud1bc'.encode('utf-8')
+ l = encoding.tolocal(s)
+ self.assertEqual(l, b'\xc5\xed') # lossless
+ self.assertEqual(s, encoding.toutf8b(l)) # convert back to utf-8
+
if __name__ == '__main__':
import silenttestrunner
silenttestrunner.main(__name__)
--- a/tests/test-export.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-export.t Wed Jun 06 13:31:24 2018 -0400
@@ -101,6 +101,44 @@
$ grep HG foo-foo_3.patch | wc -l
\s*1 (re)
+Using bookmarks:
+
+ $ hg book -f -r 9 @
+ $ hg book -f -r 11 test
+ $ hg export -B test
+ # HG changeset patch
+ # User test
+ # Date 0 0
+ # Thu Jan 01 00:00:00 1970 +0000
+ # Node ID 5f17a83f5fbd9414006a5e563eab4c8a00729efd
+ # Parent 747d3c68f8ec44bb35816bfcd59aeb50b9654c2f
+ foo-10
+
+ diff -r 747d3c68f8ec -r 5f17a83f5fbd foo
+ --- a/foo Thu Jan 01 00:00:00 1970 +0000
+ +++ b/foo Thu Jan 01 00:00:00 1970 +0000
+ @@ -8,3 +8,4 @@
+ foo-7
+ foo-8
+ foo-9
+ +foo-10
+ # HG changeset patch
+ # User test
+ # Date 0 0
+ # Thu Jan 01 00:00:00 1970 +0000
+ # Node ID f3acbafac161ec68f1598af38f794f28847ca5d3
+ # Parent 5f17a83f5fbd9414006a5e563eab4c8a00729efd
+ foo-11
+
+ diff -r 5f17a83f5fbd -r f3acbafac161 foo
+ --- a/foo Thu Jan 01 00:00:00 1970 +0000
+ +++ b/foo Thu Jan 01 00:00:00 1970 +0000
+ @@ -9,3 +9,4 @@
+ foo-8
+ foo-9
+ foo-10
+ +foo-11
+
Exporting 4 changesets to a file:
$ hg export -o export_internal 1 2 3 4
--- a/tests/test-extdata.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-extdata.t Wed Jun 06 13:31:24 2018 -0400
@@ -82,6 +82,13 @@
$ hg log -T "{extdata('unknown')}\n"
abort: unknown extdata source 'unknown'
[255]
+ $ hg log -T "{extdata(unknown)}\n"
+ hg: parse error: empty data source specified
+ (did you mean extdata('unknown')?)
+ [255]
+ $ hg log -T "{extdata('{unknown}')}\n"
+ hg: parse error: empty data source specified
+ [255]
we don't fix up relative file URLs, but we do run shell commands in repo root
--- a/tests/test-extension.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-extension.t Wed Jun 06 13:31:24 2018 -0400
@@ -460,7 +460,7 @@
> result.append(absdetail)
> result.append(legacydetail)
> result.append(proxied.detail)
- > ui.write('LIB: %s\n' % '\nLIB: '.join(result))
+ > ui.write(b'LIB: %s\n' % '\nLIB: '.join(result))
> EOF
Examine module importing.
@@ -1229,9 +1229,14 @@
$ cat > hgext/forest.py <<EOF
> cmdtable = None
+ > @command()
+ > def f():
+ > pass
+ > @command(123)
+ > def g():
+ > pass
> EOF
$ hg --config extensions.path=./path.py help foo > /dev/null
- warning: error finding commands in $TESTTMP/hgext/forest.py
abort: no such help topic: foo
(try 'hg help --keyword foo')
[255]
@@ -1283,7 +1288,7 @@
If the extensions declare outdated versions, accuse the older extension first:
$ echo "from mercurial import util" >> older.py
- $ echo "util.version = lambda:'2.2'" >> older.py
+ $ echo "util.version = lambda:b'2.2'" >> older.py
$ echo "testedwith = b'1.9.3'" >> older.py
$ echo "testedwith = b'2.1.1'" >> throw.py
$ rm -f throw.pyc throw.pyo
@@ -1388,7 +1393,7 @@
Enabled extensions:
throw external 1.2.3
- $ echo 'getversion = lambda: "1.twentythree"' >> throw.py
+ $ echo 'getversion = lambda: b"1.twentythree"' >> throw.py
$ rm -f throw.pyc throw.pyo
$ rm -Rf __pycache__
$ hg version -v --config extensions.throw=throw.py --config extensions.strip=
@@ -1517,6 +1522,14 @@
reposetup() for $TESTTMP/reposetup-test/src
reposetup() for $TESTTMP/reposetup-test/src (chg !)
+ $ hg --cwd src debugextensions
+ reposetup() for $TESTTMP/reposetup-test/src
+ dodo (untested!)
+ dudu (untested!)
+ mq
+ reposetuptest (untested!)
+ strip
+
$ hg clone -U src clone-dst1
reposetup() for $TESTTMP/reposetup-test/src
$ hg init push-dst1
@@ -1670,7 +1683,7 @@
> def exbookmarks(orig, *args, **opts):
> return orig(*args, **opts)
> def uisetup(ui):
- > synopsis = ' GREPME [--foo] [-x]'
+ > synopsis = b' GREPME [--foo] [-x]'
> docstring = '''
> GREPME make sure that this is in the help!
> '''
@@ -1697,10 +1710,6 @@
> pass
> EOF
- $ hg --config extensions.nonregistrar=`pwd`/nonregistrar.py version > /dev/null
- devel-warn: cmdutil.command is deprecated, use registrar.command to register 'foo'
- (compatibility will be dropped after Mercurial-4.6, update your code.) * (glob)
-
Prohibit the use of unicode strings as the default value of options
$ hg init $TESTTMP/opt-unicode-default
@@ -1709,9 +1718,9 @@
> from mercurial import registrar
> cmdtable = {}
> command = registrar.command(cmdtable)
- > @command(b'dummy', [('', 'opt', u'value', u'help')], 'ext [OPTIONS]')
+ > @command(b'dummy', [(b'', b'opt', u'value', u'help')], 'ext [OPTIONS]')
> def ext(*args, **opts):
- > print(opts['opt'])
+ > print(opts[b'opt'])
> EOF
$ cat > $TESTTMP/opt-unicode-default/.hg/hgrc << EOF
> [extensions]
--- a/tests/test-extensions-afterloaded.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-extensions-afterloaded.t Wed Jun 06 13:31:24 2018 -0400
@@ -3,16 +3,16 @@
$ cat > foo.py <<EOF
> from mercurial import extensions
> def uisetup(ui):
- > ui.write("foo.uisetup\\n")
+ > ui.write(b"foo.uisetup\\n")
> ui.flush()
> def bar_loaded(loaded):
- > ui.write("foo: bar loaded: %r\\n" % (loaded,))
+ > ui.write(b"foo: bar loaded: %r\\n" % (loaded,))
> ui.flush()
- > extensions.afterloaded('bar', bar_loaded)
+ > extensions.afterloaded(b'bar', bar_loaded)
> EOF
$ cat > bar.py <<EOF
> def uisetup(ui):
- > ui.write("bar.uisetup\\n")
+ > ui.write(b"bar.uisetup\\n")
> ui.flush()
> EOF
$ basepath=`pwd`
@@ -72,9 +72,9 @@
$ cd ..
$ cat > minvers.py <<EOF
- > minimumhgversion = '9999.9999'
+ > minimumhgversion = b'9999.9999'
> def uisetup(ui):
- > ui.write("minvers.uisetup\\n")
+ > ui.write(b"minvers.uisetup\\n")
> ui.flush()
> EOF
$ hg init minversion
--- a/tests/test-extensions-wrapfunction.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-extensions-wrapfunction.py Wed Jun 06 13:31:24 2018 -0400
@@ -35,7 +35,7 @@
print('unwrap %s: %s: %s' % (getid(w), getid(result), msg))
batchwrap(wrappers + [wrappers[0]])
-batchunwrap([(wrappers[i] if i >= 0 else None)
+batchunwrap([(wrappers[i] if i is not None and i >= 0 else None)
for i in [3, None, 0, 4, 0, 2, 1, None]])
wrap0 = extensions.wrappedfunction(dummy, 'getstack', wrappers[0])
--- a/tests/test-filecache.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-filecache.py Wed Jun 06 13:31:24 2018 -0400
@@ -8,6 +8,16 @@
'cacheable']):
sys.exit(80)
+print_ = print
+def print(*args, **kwargs):
+ """print() wrapper that flushes stdout buffers to avoid py3 buffer issues
+
+ We could also just write directly to sys.stdout.buffer the way the
+ ui object will, but this was easier for porting the test.
+ """
+ print_(*args, **kwargs)
+ sys.stdout.flush()
+
from mercurial import (
extensions,
hg,
@@ -46,7 +56,7 @@
def invalidate(self):
for k in self._filecache:
try:
- delattr(self, k)
+ delattr(self, pycompat.sysstr(k))
except AttributeError:
pass
@@ -84,8 +94,8 @@
# atomic replace file, size doesn't change
# hopefully st_mtime doesn't change as well so this doesn't use the cache
# because of inode change
- f = vfsmod.vfs('.')('x', 'w', atomictemp=True)
- f.write('b')
+ f = vfsmod.vfs(b'.')(b'x', b'w', atomictemp=True)
+ f.write(b'b')
f.close()
repo.invalidate()
@@ -108,19 +118,19 @@
# should recreate the object
repo.cached
- f = vfsmod.vfs('.')('y', 'w', atomictemp=True)
- f.write('B')
+ f = vfsmod.vfs(b'.')(b'y', b'w', atomictemp=True)
+ f.write(b'B')
f.close()
repo.invalidate()
print("* file y changed inode")
repo.cached
- f = vfsmod.vfs('.')('x', 'w', atomictemp=True)
- f.write('c')
+ f = vfsmod.vfs(b'.')(b'x', b'w', atomictemp=True)
+ f.write(b'c')
f.close()
- f = vfsmod.vfs('.')('y', 'w', atomictemp=True)
- f.write('C')
+ f = vfsmod.vfs(b'.')(b'y', b'w', atomictemp=True)
+ f.write(b'C')
f.close()
repo.invalidate()
@@ -155,14 +165,14 @@
repo = hg.repository(uimod.ui.load())
# first rollback clears the filecache, but changelog to stays in __dict__
repo.rollback()
- repo.commit('.')
+ repo.commit(b'.')
# second rollback comes along and touches the changelog externally
# (file is moved)
repo.rollback()
# but since changelog isn't under the filecache control anymore, we don't
# see that it changed, and return the old changelog without reconstructing
# it
- repo.commit('.')
+ repo.commit(b'.')
def setbeforeget(repo):
os.remove('x')
--- a/tests/test-filelog.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-filelog.py Wed Jun 06 13:31:24 2018 -0400
@@ -14,21 +14,21 @@
)
myui = uimod.ui.load()
-repo = hg.repository(myui, path='.', create=True)
+repo = hg.repository(myui, path=b'.', create=True)
-fl = repo.file('foobar')
+fl = repo.file(b'foobar')
def addrev(text, renamed=False):
if renamed:
# data doesn't matter. Just make sure filelog.renamed() returns True
- meta = {'copyrev': hex(nullid), 'copy': 'bar'}
+ meta = {b'copyrev': hex(nullid), b'copy': b'bar'}
else:
meta = {}
lock = t = None
try:
lock = repo.lock()
- t = repo.transaction('commit')
+ t = repo.transaction(b'commit')
node = fl.add(text, meta, t, 0, nullid, nullid)
return node
finally:
@@ -40,8 +40,8 @@
def error(text):
print('ERROR: ' + text)
-textwith = '\1\nfoo'
-without = 'foo'
+textwith = b'\1\nfoo'
+without = b'foo'
node = addrev(textwith)
if not textwith == fl.read(node):
--- a/tests/test-fileset.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-fileset.t Wed Jun 06 13:31:24 2018 -0400
@@ -180,7 +180,7 @@
Test files properties
- >>> open('bin', 'wb').write(b'\0a')
+ >>> open('bin', 'wb').write(b'\0a') and None
$ fileset 'binary()'
$ fileset 'binary() and unknown()'
bin
@@ -219,8 +219,8 @@
$ hg --config ui.portablefilenames=ignore add con.xml
#endif
- >>> open('1k', 'wb').write(b' '*1024)
- >>> open('2k', 'wb').write(b' '*2048)
+ >>> open('1k', 'wb').write(b' '*1024) and None
+ >>> open('2k', 'wb').write(b' '*2048) and None
$ hg add 1k 2k
$ fileset 'size("bar")'
hg: parse error: couldn't parse size: bar
@@ -391,9 +391,9 @@
b2
c1
- >>> open('dos', 'wb').write("dos\r\n")
- >>> open('mixed', 'wb').write("dos\r\nunix\n")
- >>> open('mac', 'wb').write("mac\r")
+ >>> open('dos', 'wb').write(b"dos\r\n") and None
+ >>> open('mixed', 'wb').write(b"dos\r\nunix\n") and None
+ >>> open('mac', 'wb').write(b"mac\r") and None
$ hg add dos mixed mac
(remove a1, to examine safety of 'eol' on removed files)
@@ -434,7 +434,7 @@
> from mercurial import registrar
>
> filesetpredicate = registrar.filesetpredicate()
- > @filesetpredicate('existingcaller()', callexisting=False)
+ > @filesetpredicate(b'existingcaller()', callexisting=False)
> def existingcaller(mctx, x):
> # this 'mctx.existing()' invocation is unintentional
> return [f for f in mctx.existing()]
--- a/tests/test-flags.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-flags.t Wed Jun 06 13:31:24 2018 -0400
@@ -46,6 +46,7 @@
adding file changes
added 1 changesets with 0 changes to 0 files (+1 heads)
new changesets 7f4313b42a34
+ 1 changesets became public
(run 'hg heads' to see heads, 'hg merge' to merge)
$ hg heads
changeset: 2:7f4313b42a34
--- a/tests/test-fuzz-targets.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-fuzz-targets.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,17 @@
-#require clang-libfuzzer test-repo
+#require test-repo
+
$ cd $TESTDIR/../contrib/fuzz
- $ make
-Just run the fuzzer for five seconds to verify it works at all.
+
+#if clang-libfuzzer
+ $ make -s clean all
+#endif
+#if no-clang-libfuzzer clang-6.0
+ $ make -s clean all CC=clang-6.0 CXX=clang++-6.0
+#endif
+#if no-clang-libfuzzer no-clang-6.0
+ $ exit 80
+#endif
+
+Just run the fuzzers for five seconds each to verify it works at all.
$ ./bdiff -max_total_time 5
+ $ ./xdiff -max_total_time 5
--- a/tests/test-githelp.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-githelp.t Wed Jun 06 13:31:24 2018 -0400
@@ -43,8 +43,8 @@
githelp on a command with unrecognized option packed with other options should fail with error
$ hg githelp -- commit -pv
- abort: unknown option v packed with other options
- Please try passing the option as it's own flag: -v
+ abort: unknown option 'v' packed with other options
+ (please try passing the option as its own flag: -v)
[255]
githelp for git rebase --skip
@@ -165,11 +165,11 @@
hg update .~3
$ hg githelp -- reset --mixed HEAD
- NOTE: --mixed has no meaning since Mercurial has no staging area
+ note: --mixed has no meaning since Mercurial has no staging area
hg update .
$ hg githelp -- reset --soft HEAD
- NOTE: --soft has no meaning since Mercurial has no staging area
+ note: --soft has no meaning since Mercurial has no staging area
hg update .
$ hg githelp -- reset --hard HEAD
@@ -221,7 +221,7 @@
githelp for whatchanged should show deprecated message
$ hg githelp -- whatchanged -p
- This command has been deprecated in the git project, thus isn't supported by this tool.
+ this command has been deprecated in the git project, thus isn't supported by this tool
githelp for git branch -m renaming
@@ -259,8 +259,8 @@
git merge-base
$ hg githelp -- git merge-base --is-ancestor
ignoring unknown option --is-ancestor
- NOTE: ancestors() is part of the revset language.
- Learn more about revsets with 'hg help revsets'
+ note: ancestors() is part of the revset language
+ (learn more about revsets with 'hg help revsets')
hg log -T '{node}\n' -r 'ancestor(A,B)'
@@ -279,7 +279,7 @@
hg commit --interactive
$ hg githelp -- git add --all
- note: use hg addremove to remove files that have been deleted.
+ note: use hg addremove to remove files that have been deleted
hg add
@@ -288,9 +288,9 @@
$ hg githelp -- git reflog
hg journal
- note: in hg commits can be deleted from repo but we always have backups.
+ note: in hg commits can be deleted from repo but we always have backups
$ hg githelp -- git reflog --all
hg journal --all
- note: in hg commits can be deleted from repo but we always have backups.
+ note: in hg commits can be deleted from repo but we always have backups
--- a/tests/test-globalopts.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-globalopts.t Wed Jun 06 13:31:24 2018 -0400
@@ -353,6 +353,7 @@
color Colorizing Outputs
config Configuration Files
dates Date Formats
+ deprecated Deprecated Features
diffs Diff Formats
environment Environment Variables
extensions Using Additional Features
@@ -436,6 +437,7 @@
color Colorizing Outputs
config Configuration Files
dates Date Formats
+ deprecated Deprecated Features
diffs Diff Formats
environment Environment Variables
extensions Using Additional Features
--- a/tests/test-graft.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-graft.t Wed Jun 06 13:31:24 2018 -0400
@@ -213,7 +213,7 @@
my e@1905859650ec+ other e@9c233e8e184d ancestor e@4c60f11aa304
warning: conflicts while merging e! (edit, then use 'hg resolve --mark')
abort: unresolved conflicts, can't continue
- (use 'hg resolve' and 'hg graft --continue --log')
+ (use 'hg resolve' and 'hg graft --continue')
[255]
Summary should mention graft:
@@ -1373,3 +1373,195 @@
note: graft of 7:d3c3f2b38ecc created no changes to commit
$ cd ..
+
+Testing the reading of old format graftstate file with newer mercurial
+
+ $ hg init oldgraft
+ $ cd oldgraft
+ $ for ch in a b c; do echo foo > $ch; hg add $ch; hg ci -Aqm "added "$ch; done;
+ $ hg log -GT "{rev}:{node|short} {desc}\n"
+ @ 2:8be98ac1a569 added c
+ |
+ o 1:80e6d2c47cfe added b
+ |
+ o 0:f7ad41964313 added a
+
+ $ hg up 0
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ echo bar > b
+ $ hg add b
+ $ hg ci -m "bar to b"
+ created new head
+ $ hg graft -r 1 -r 2
+ grafting 1:80e6d2c47cfe "added b"
+ merging b
+ warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
+ abort: unresolved conflicts, can't continue
+ (use 'hg resolve' and 'hg graft --continue')
+ [255]
+
+Writing the nodes in old format to graftstate
+
+ $ hg log -r 1 -r 2 -T '{node}\n' > .hg/graftstate
+ $ echo foo > b
+ $ hg resolve -m
+ (no more unresolved files)
+ continue: hg graft --continue
+ $ hg graft --continue
+ grafting 1:80e6d2c47cfe "added b"
+ grafting 2:8be98ac1a569 "added c"
+
+Testing that --user is preserved during conflicts and value is reused while
+running `hg graft --continue`
+
+ $ hg log -G
+ @ changeset: 5:711e9fa999f1
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: added c
+ |
+ o changeset: 4:e5ad7353b408
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: added b
+ |
+ o changeset: 3:9e887f7a939c
+ | parent: 0:f7ad41964313
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: bar to b
+ |
+ | o changeset: 2:8be98ac1a569
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | summary: added c
+ | |
+ | o changeset: 1:80e6d2c47cfe
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: added b
+ |
+ o changeset: 0:f7ad41964313
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: added a
+
+
+ $ hg up '.^^'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+
+ $ hg graft -r 1 -r 2 --user batman
+ grafting 1:80e6d2c47cfe "added b"
+ merging b
+ warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
+ abort: unresolved conflicts, can't continue
+ (use 'hg resolve' and 'hg graft --continue')
+ [255]
+
+ $ echo wat > b
+ $ hg resolve -m
+ (no more unresolved files)
+ continue: hg graft --continue
+
+ $ hg graft --continue
+ grafting 1:80e6d2c47cfe "added b"
+ grafting 2:8be98ac1a569 "added c"
+
+ $ hg log -Gr 3::
+ @ changeset: 7:11a36ffaacf2
+ | tag: tip
+ | user: batman
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: added c
+ |
+ o changeset: 6:76803afc6511
+ | parent: 3:9e887f7a939c
+ | user: batman
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: added b
+ |
+ | o changeset: 5:711e9fa999f1
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | summary: added c
+ | |
+ | o changeset: 4:e5ad7353b408
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: added b
+ |
+ o changeset: 3:9e887f7a939c
+ | parent: 0:f7ad41964313
+ ~ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: bar to b
+
+Test that --date is preserved and reused in `hg graft --continue`
+
+ $ hg up '.^^'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg graft -r 1 -r 2 --date '1234560000 120'
+ grafting 1:80e6d2c47cfe "added b"
+ merging b
+ warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
+ abort: unresolved conflicts, can't continue
+ (use 'hg resolve' and 'hg graft --continue')
+ [255]
+
+ $ echo foobar > b
+ $ hg resolve -m
+ (no more unresolved files)
+ continue: hg graft --continue
+ $ hg graft --continue
+ grafting 1:80e6d2c47cfe "added b"
+ grafting 2:8be98ac1a569 "added c"
+
+ $ hg log -Gr '.^^::.'
+ @ changeset: 9:1896b76e007a
+ | tag: tip
+ | user: test
+ | date: Fri Feb 13 21:18:00 2009 -0002
+ | summary: added c
+ |
+ o changeset: 8:ce2b4f1632af
+ | parent: 3:9e887f7a939c
+ | user: test
+ | date: Fri Feb 13 21:18:00 2009 -0002
+ | summary: added b
+ |
+ o changeset: 3:9e887f7a939c
+ | parent: 0:f7ad41964313
+ ~ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: bar to b
+
+Test that --log is preserved and reused in `hg graft --continue`
+
+ $ hg up '.^^'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg graft -r 1 -r 2 --log
+ grafting 1:80e6d2c47cfe "added b"
+ merging b
+ warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
+ abort: unresolved conflicts, can't continue
+ (use 'hg resolve' and 'hg graft --continue')
+ [255]
+
+ $ echo foobar > b
+ $ hg resolve -m
+ (no more unresolved files)
+ continue: hg graft --continue
+
+ $ hg graft --continue
+ grafting 1:80e6d2c47cfe "added b"
+ grafting 2:8be98ac1a569 "added c"
+
+ $ hg log -GT "{rev}:{node|short} {desc}" -r '.^^::.'
+ @ 11:30c1050a58b2 added c
+ | (grafted from 8be98ac1a56990c2d9ca6861041b8390af7bd6f3)
+ o 10:ec7eda2313e2 added b
+ | (grafted from 80e6d2c47cfe5b3185519568327a17a061c7efb6)
+ o 3:9e887f7a939c bar to b
+ |
+ ~
--- a/tests/test-grep.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-grep.t Wed Jun 06 13:31:24 2018 -0400
@@ -250,8 +250,11 @@
$ hg stat
M port2
$ hg grep -r 'wdir()' port
- abort: working directory revision cannot be specified
- [255]
+ port2:2147483647:export
+ port2:2147483647:vaportight
+ port2:2147483647:import/export
+ port2:2147483647:deport
+ port2:2147483647:wport
$ cd ..
$ hg init t2
@@ -368,3 +371,14 @@
binfile.bin:0:+: Binary file matches
$ cd ..
+
+Fix_Wdir(): test that passing wdir() t -r flag does greps on the
+files modified in the working directory
+
+ $ cd a
+ $ echo "abracadara" >> a
+ $ hg add a
+ $ hg grep -r "wdir()" "abra"
+ a:2147483647:abracadara
+
+ $ cd ..
--- a/tests/test-hardlinks.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-hardlinks.t Wed Jun 06 13:31:24 2018 -0400
@@ -122,7 +122,7 @@
$ cd r3/d1
>>> f = open('data1', 'wb')
>>> for x in range(10000):
- ... f.write("%s\n" % str(x))
+ ... f.write(b"%d\n" % x) and None
>>> f.close()
$ for j in 0 1 2 3 4 5 6 7 8 9; do
> cat data1 >> f2
--- a/tests/test-help.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-help.t Wed Jun 06 13:31:24 2018 -0400
@@ -110,6 +110,7 @@
color Colorizing Outputs
config Configuration Files
dates Date Formats
+ deprecated Deprecated Features
diffs Diff Formats
environment Environment Variables
extensions Using Additional Features
@@ -189,6 +190,7 @@
color Colorizing Outputs
config Configuration Files
dates Date Formats
+ deprecated Deprecated Features
diffs Diff Formats
environment Environment Variables
extensions Using Additional Features
@@ -889,6 +891,7 @@
color Colorizing Outputs
config Configuration Files
dates Date Formats
+ deprecated Deprecated Features
diffs Diff Formats
environment Environment Variables
extensions Using Additional Features
@@ -2032,6 +2035,13 @@
Date Formats
</td></tr>
<tr><td>
+ <a href="/help/deprecated">
+ deprecated
+ </a>
+ </td><td>
+ Deprecated Features
+ </td></tr>
+ <tr><td>
<a href="/help/diffs">
diffs
</a>
--- a/tests/test-hgrc.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-hgrc.t Wed Jun 06 13:31:24 2018 -0400
@@ -199,10 +199,10 @@
$ cat > plain.py <<EOF
> from mercurial import commands, extensions
> def _config(orig, ui, repo, *values, **opts):
- > ui.write('plain: %r\n' % ui.plain())
+ > ui.write(b'plain: %r\n' % ui.plain())
> return orig(ui, repo, *values, **opts)
> def uisetup(ui):
- > extensions.wrapcommand(commands.table, 'config', _config)
+ > extensions.wrapcommand(commands.table, b'config', _config)
> EOF
$ echo "[extensions]" >> $HGRC
$ echo "plain=./plain.py" >> $HGRC
--- a/tests/test-hgweb-auth.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-hgweb-auth.py Wed Jun 06 13:31:24 2018 -0400
@@ -3,10 +3,14 @@
from mercurial import demandimport; demandimport.enable()
from mercurial import (
error,
+ pycompat,
ui as uimod,
url,
util,
)
+from mercurial.utils import (
+ stringutil,
+)
urlerr = util.urlerr
urlreq = util.urlreq
@@ -23,12 +27,8 @@
ui.setconfig('auth', name, value)
return ui
-def dumpdict(dict):
- return '{' + ', '.join(['%s: %s' % (k, dict[k])
- for k in sorted(dict)]) + '}'
-
def test(auth, urls=None):
- print('CFG:', dumpdict(auth))
+ print('CFG:', pycompat.sysstr(stringutil.pprint(auth, bprefix=True)))
prefixes = set()
for k in auth:
prefixes.add(k.split('.', 1)[0])
--- a/tests/test-hgweb-auth.py.out Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-hgweb-auth.py.out Wed Jun 06 13:31:24 2018 -0400
@@ -1,7 +1,7 @@
*** Test in-uri schemes
-CFG: {x.prefix: http://example.org}
+CFG: {b'x.prefix': b'http://example.org'}
URI: http://example.org/foo
('x', 'x')
URI: http://example.org/foo/bar
@@ -18,7 +18,7 @@
abort
URI: https://y@example.org/bar
abort
-CFG: {x.prefix: https://example.org}
+CFG: {b'x.prefix': b'https://example.org'}
URI: http://example.org/foo
abort
URI: http://example.org/foo/bar
@@ -35,7 +35,7 @@
('x', 'x')
URI: https://y@example.org/bar
abort
-CFG: {x.prefix: http://example.org, x.schemes: https}
+CFG: {b'x.prefix': b'http://example.org', b'x.schemes': b'https'}
URI: http://example.org/foo
('x', 'x')
URI: http://example.org/foo/bar
@@ -52,7 +52,7 @@
abort
URI: https://y@example.org/bar
abort
-CFG: {x.prefix: https://example.org, x.schemes: http}
+CFG: {b'x.prefix': b'https://example.org', b'x.schemes': b'http'}
URI: http://example.org/foo
abort
URI: http://example.org/foo/bar
@@ -72,7 +72,7 @@
*** Test separately configured schemes
-CFG: {x.prefix: example.org, x.schemes: http}
+CFG: {b'x.prefix': b'example.org', b'x.schemes': b'http'}
URI: http://example.org/foo
('x', 'x')
URI: http://example.org/foo/bar
@@ -89,7 +89,7 @@
abort
URI: https://y@example.org/bar
abort
-CFG: {x.prefix: example.org, x.schemes: https}
+CFG: {b'x.prefix': b'example.org', b'x.schemes': b'https'}
URI: http://example.org/foo
abort
URI: http://example.org/foo/bar
@@ -106,7 +106,7 @@
('x', 'x')
URI: https://y@example.org/bar
abort
-CFG: {x.prefix: example.org, x.schemes: http https}
+CFG: {b'x.prefix': b'example.org', b'x.schemes': b'http https'}
URI: http://example.org/foo
('x', 'x')
URI: http://example.org/foo/bar
@@ -126,7 +126,7 @@
*** Test prefix matching
-CFG: {x.prefix: http://example.org/foo, y.prefix: http://example.org/bar}
+CFG: {b'x.prefix': b'http://example.org/foo', b'y.prefix': b'http://example.org/bar'}
URI: http://example.org/foo
('x', 'x')
URI: http://example.org/foo/bar
@@ -143,7 +143,7 @@
abort
URI: https://y@example.org/bar
abort
-CFG: {x.prefix: http://example.org/foo, y.prefix: http://example.org/foo/bar}
+CFG: {b'x.prefix': b'http://example.org/foo', b'y.prefix': b'http://example.org/foo/bar'}
URI: http://example.org/foo
('x', 'x')
URI: http://example.org/foo/bar
@@ -160,7 +160,7 @@
abort
URI: https://y@example.org/bar
abort
-CFG: {x.prefix: *, y.prefix: https://example.org/bar}
+CFG: {b'x.prefix': b'*', b'y.prefix': b'https://example.org/bar'}
URI: http://example.org/foo
abort
URI: http://example.org/foo/bar
@@ -180,13 +180,13 @@
*** Test user matching
-CFG: {x.password: xpassword, x.prefix: http://example.org/foo, x.username: None}
+CFG: {b'x.password': b'xpassword', b'x.prefix': b'http://example.org/foo', b'x.username': None}
URI: http://y@example.org/foo
('y', 'xpassword')
-CFG: {x.password: xpassword, x.prefix: http://example.org/foo, x.username: None, y.password: ypassword, y.prefix: http://example.org/foo, y.username: y}
+CFG: {b'x.password': b'xpassword', b'x.prefix': b'http://example.org/foo', b'x.username': None, b'y.password': b'ypassword', b'y.prefix': b'http://example.org/foo', b'y.username': b'y'}
URI: http://y@example.org/foo
('y', 'ypassword')
-CFG: {x.password: xpassword, x.prefix: http://example.org/foo/bar, x.username: None, y.password: ypassword, y.prefix: http://example.org/foo, y.username: y}
+CFG: {b'x.password': b'xpassword', b'x.prefix': b'http://example.org/foo/bar', b'x.username': None, b'y.password': b'ypassword', b'y.prefix': b'http://example.org/foo', b'y.username': b'y'}
URI: http://y@example.org/foo/bar
('y', 'xpassword')
--- a/tests/test-hgweb-descend-empties.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-hgweb-descend-empties.t Wed Jun 06 13:31:24 2018 -0400
@@ -93,11 +93,7 @@
</tr>
</thead>
<tbody class="stripes2">
- <tr class="fileline">
- <td class="name"><a href="/file/tip/">[up]</a></td>
- <td class="size"></td>
- <td class="permissions">drwxr-xr-x</td>
- </tr>
+
<tr class="fileline">
<td class="name">
@@ -213,11 +209,7 @@
</tr>
</thead>
<tbody class="stripes2">
- <tr class="fileline">
- <td class="name"><a href="/file/tip/?style=coal">[up]</a></td>
- <td class="size"></td>
- <td class="permissions">drwxr-xr-x</td>
- </tr>
+
<tr class="fileline">
<td class="name">
@@ -320,13 +312,7 @@
<p class="files">/ <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="branchtag" title="default">default</span> <span class="tagtag" title="tip">tip</span> </span></p>
<table>
- <tr class="parity0">
- <td>drwxr-xr-x</td>
- <td></td>
- <td></td>
- <td><a href="/file/tip/?style=monoblue">[up]</a></td>
- <td class="link"> </td>
- </tr>
+
<tr class="parity1">
<td>drwxr-xr-x</td>
@@ -433,13 +419,7 @@
<div class="title">/ <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="branchtag" title="default">default</span> <span class="tagtag" title="tip">tip</span> </span></div>
<table cellspacing="0">
- <tr class="parity0">
- <td style="font-family:monospace">drwxr-xr-x</td>
- <td style="font-family:monospace"></td>
- <td style="font-family:monospace"></td>
- <td><a href="/file/tip/?style=gitweb">[up]</a></td>
- <td class="link"> </td>
- </tr>
+
<tr class="parity1">
<td style="font-family:monospace">drwxr-xr-x</td>
@@ -524,40 +504,41 @@
<h2><a href="/">Mercurial</a> / files for changeset <a href="/rev/c9f45f7a1659">c9f45f7a1659</a>: /</h2>
<table cellpadding="0" cellspacing="0">
- <tr class="parity0">
- <td><tt>drwxr-xr-x</tt>
- <td>
- <td>
- <td><a href="/file/tip/?style=spartan">[up]</a>
- </tr>
+
<tr class="parity1">
- <td><tt>drwxr-xr-x</tt>
- <td>
- <td>
+ <td><tt>drwxr-xr-x</tt> </td>
+ <td> </td>
+ <td> </td>
<td>
<a href="/file/tip/a1?style=spartan">a1/</a>
<a href="/file/tip/a1/a2/a3/a4?style=spartan">
a2/a3/a4
</a>
+ </td>
+ </tr>
<tr class="parity0">
- <td><tt>drwxr-xr-x</tt>
- <td>
- <td>
+ <td><tt>drwxr-xr-x</tt> </td>
+ <td> </td>
+ <td> </td>
<td>
<a href="/file/tip/b1?style=spartan">b1/</a>
<a href="/file/tip/b1/b2/b3?style=spartan">
b2/b3
</a>
+ </td>
+ </tr>
<tr class="parity1">
- <td><tt>drwxr-xr-x</tt>
- <td>
- <td>
+ <td><tt>drwxr-xr-x</tt> </td>
+ <td> </td>
+ <td> </td>
<td>
<a href="/file/tip/d1?style=spartan">d1/</a>
<a href="/file/tip/d1/d2?style=spartan">
d2
</a>
+ </td>
+ </tr>
</table>
--- a/tests/test-hgweb-empty.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-hgweb-empty.t Wed Jun 06 13:31:24 2018 -0400
@@ -396,11 +396,7 @@
</tr>
</thead>
<tbody class="stripes2">
- <tr class="fileline">
- <td class="name"><a href="/file/tip/">[up]</a></td>
- <td class="size"></td>
- <td class="permissions">drwxr-xr-x</td>
- </tr>
+
</tbody>
--- a/tests/test-hgweb-json.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-hgweb-json.t Wed Jun 06 13:31:24 2018 -0400
@@ -101,7 +101,7 @@
$ echo '[web]' >> .hg/hgrc
- $ echo 'allow_archive = bz2' >> .hg/hgrc
+ $ echo 'allow-archive = bz2' >> .hg/hgrc
$ hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E error.log
$ cat hg.pid >> $DAEMON_PIDS
@@ -1914,6 +1914,10 @@
"topic": "dates"
},
{
+ "summary": "Deprecated Features",
+ "topic": "deprecated"
+ },
+ {
"summary": "Diff Formats",
"topic": "diffs"
},
--- a/tests/test-hgweb-symrev.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-hgweb-symrev.t Wed Jun 06 13:31:24 2018 -0400
@@ -30,7 +30,7 @@
|
o 0:43c799df6e75
- $ hg serve --config web.allow_archive=zip -n test -p $HGPORT -d --pid-file=hg.pid -E errors.log
+ $ hg serve --config web.allow-archive=zip -n test -p $HGPORT -d --pid-file=hg.pid -E errors.log
$ cat hg.pid >> $DAEMON_PIDS
$ REVLINKS='href=[^>]+(rev=|/)(43c799df6e75|0|a7c1559b7bba|1|xyzzy|9d8c40cba617|2|tip|default)'
@@ -72,7 +72,6 @@
<li><a href="/rev/tip?style=paper">changeset</a></li>
<a href="/archive/tip.zip">zip</a>
directory / @ 2:<a href="/rev/9d8c40cba617?style=paper">9d8c40cba617</a>
- <td class="name"><a href="/file/tip/?style=paper">[up]</a></td>
<a href="/file/tip/dir?style=paper">
<a href="/file/tip/dir/?style=paper">
<a href="/file/tip/foo?style=paper">
@@ -138,7 +137,6 @@
<li><a href="/rev/xyzzy?style=paper">changeset</a></li>
<a href="/archive/xyzzy.zip">zip</a>
directory / @ 1:<a href="/rev/a7c1559b7bba?style=paper">a7c1559b7bba</a>
- <td class="name"><a href="/file/xyzzy/?style=paper">[up]</a></td>
<a href="/file/xyzzy/dir?style=paper">
<a href="/file/xyzzy/dir/?style=paper">
<a href="/file/xyzzy/foo?style=paper">
@@ -272,7 +270,6 @@
<li><a href="/rev/tip?style=coal">changeset</a></li>
<a href="/archive/tip.zip">zip</a>
directory / @ 2:<a href="/rev/9d8c40cba617?style=coal">9d8c40cba617</a>
- <td class="name"><a href="/file/tip/?style=coal">[up]</a></td>
<a href="/file/tip/dir?style=coal">
<a href="/file/tip/dir/?style=coal">
<a href="/file/tip/foo?style=coal">
@@ -338,7 +335,6 @@
<li><a href="/rev/xyzzy?style=coal">changeset</a></li>
<a href="/archive/xyzzy.zip">zip</a>
directory / @ 1:<a href="/rev/a7c1559b7bba?style=coal">a7c1559b7bba</a>
- <td class="name"><a href="/file/xyzzy/?style=coal">[up]</a></td>
<a href="/file/xyzzy/dir?style=coal">
<a href="/file/xyzzy/dir/?style=coal">
<a href="/file/xyzzy/foo?style=coal">
@@ -520,7 +516,6 @@
$ "$TESTDIR/get-with-headers.py" $LOCALIP:$HGPORT 'file?style=gitweb' | egrep $REVLINKS
<a href="/rev/tip?style=gitweb">changeset</a> | <a href="/archive/tip.zip">zip</a> |
- <td><a href="/file/tip/?style=gitweb">[up]</a></td>
<a href="/file/tip/dir?style=gitweb">dir</a>
<a href="/file/tip/dir/?style=gitweb"></a>
<a href="/file/tip/dir?style=gitweb">files</a>
@@ -594,7 +589,6 @@
$ "$TESTDIR/get-with-headers.py" $LOCALIP:$HGPORT 'file/xyzzy?style=gitweb' | egrep $REVLINKS
<a href="/rev/xyzzy?style=gitweb">changeset</a> | <a href="/archive/xyzzy.zip">zip</a> |
- <td><a href="/file/xyzzy/?style=gitweb">[up]</a></td>
<a href="/file/xyzzy/dir?style=gitweb">dir</a>
<a href="/file/xyzzy/dir/?style=gitweb"></a>
<a href="/file/xyzzy/dir?style=gitweb">files</a>
@@ -760,7 +754,6 @@
<li><a href="/graph/tip?style=monoblue">graph</a></li>
<li><a href="/rev/tip?style=monoblue">changeset</a></li>
<li><a href="/archive/tip.zip">zip</a></li>
- <td><a href="/file/tip/?style=monoblue">[up]</a></td>
<a href="/file/tip/dir?style=monoblue">dir</a>
<a href="/file/tip/dir/?style=monoblue"></a>
<td><a href="/file/tip/dir?style=monoblue">files</a></td>
@@ -823,7 +816,6 @@
<li><a href="/graph/xyzzy?style=monoblue">graph</a></li>
<li><a href="/rev/xyzzy?style=monoblue">changeset</a></li>
<li><a href="/archive/xyzzy.zip">zip</a></li>
- <td><a href="/file/xyzzy/?style=monoblue">[up]</a></td>
<a href="/file/xyzzy/dir?style=monoblue">dir</a>
<a href="/file/xyzzy/dir/?style=monoblue"></a>
<td><a href="/file/xyzzy/dir?style=monoblue">files</a></td>
@@ -964,10 +956,9 @@
<a href="/rev/tip?style=spartan">changeset</a>
<a href="/archive/tip.zip">zip</a>
<h2><a href="/">Mercurial</a> / files for changeset <a href="/rev/9d8c40cba617">9d8c40cba617</a>: /</h2>
- <td><a href="/file/tip/?style=spartan">[up]</a>
<a href="/file/tip/dir?style=spartan">dir/</a>
<a href="/file/tip/dir/?style=spartan">
- <td><a href="/file/tip/foo?style=spartan">foo</a>
+ <td><a href="/file/tip/foo?style=spartan">foo</a></td>
$ "$TESTDIR/get-with-headers.py" $LOCALIP:$HGPORT 'shortlog?style=spartan&rev=all()' | egrep $REVLINKS
<a href="/archive/tip.zip">zip</a>
@@ -1037,10 +1028,9 @@
<a href="/rev/xyzzy?style=spartan">changeset</a>
<a href="/archive/xyzzy.zip">zip</a>
<h2><a href="/">Mercurial</a> / files for changeset <a href="/rev/a7c1559b7bba">a7c1559b7bba</a>: /</h2>
- <td><a href="/file/xyzzy/?style=spartan">[up]</a>
<a href="/file/xyzzy/dir?style=spartan">dir/</a>
<a href="/file/xyzzy/dir/?style=spartan">
- <td><a href="/file/xyzzy/foo?style=spartan">foo</a>
+ <td><a href="/file/xyzzy/foo?style=spartan">foo</a></td>
$ "$TESTDIR/get-with-headers.py" $LOCALIP:$HGPORT 'file/xyzzy/foo?style=spartan' | egrep $REVLINKS
<a href="/log/xyzzy?style=spartan">changelog</a>
--- a/tests/test-hgweb.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-hgweb.t Wed Jun 06 13:31:24 2018 -0400
@@ -287,11 +287,7 @@
</tr>
</thead>
<tbody class="stripes2">
- <tr class="fileline">
- <td class="name"><a href="/file/tip/">[up]</a></td>
- <td class="size"></td>
- <td class="permissions">drwxr-xr-x</td>
- </tr>
+
<tr class="fileline">
<td class="name">
@@ -802,6 +798,29 @@
200 Script output follows
changeset 0:<a href="/rev/2ef0ac749a14?style=paper">2ef0ac749a14</a>
+no '[up]' entry in file view when in root directory
+
+ $ get-with-headers.py localhost:$HGPORT 'file/tip?style=paper' | grep -F '[up]'
+ [1]
+ $ get-with-headers.py localhost:$HGPORT 'file/tip/da?style=paper' | grep -F '[up]'
+ <a href="/file/tip/?style=paper">[up]</a>
+ $ get-with-headers.py localhost:$HGPORT 'file/tip?style=coal' | grep -F '[up]'
+ [1]
+ $ get-with-headers.py localhost:$HGPORT 'file/tip/da?style=coal' | grep -F '[up]'
+ <a href="/file/tip/?style=coal">[up]</a>
+ $ get-with-headers.py localhost:$HGPORT 'file/tip?style=gitweb' | grep -F '[up]'
+ [1]
+ $ get-with-headers.py localhost:$HGPORT 'file/tip/da?style=gitweb' | grep -F '[up]'
+ <a href="/file/tip/?style=gitweb">[up]</a>
+ $ get-with-headers.py localhost:$HGPORT 'file/tip?style=monoblue' | grep -F '[up]'
+ [1]
+ $ get-with-headers.py localhost:$HGPORT 'file/tip/da?style=monoblue' | grep -F '[up]'
+ <a href="/file/tip/?style=monoblue">[up]</a>
+ $ get-with-headers.py localhost:$HGPORT 'file/tip?style=spartan' | grep -F '[up]'
+ [1]
+ $ get-with-headers.py localhost:$HGPORT 'file/tip/da?style=spartan' | grep -F '[up]'
+ <a href="/file/tip/?style=spartan">[up]</a>
+
no style can be loaded from directories other than the specified paths
$ mkdir -p x/templates/fallback
--- a/tests/test-hgwebdir-paths.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-hgwebdir-paths.py Wed Jun 06 13:31:24 2018 -0400
@@ -10,30 +10,30 @@
)
hgwebdir = hgwebdir_mod.hgwebdir
-os.mkdir('webdir')
-os.chdir('webdir')
+os.mkdir(b'webdir')
+os.chdir(b'webdir')
-webdir = os.path.realpath('.')
+webdir = os.path.realpath(b'.')
u = uimod.ui.load()
-hg.repository(u, 'a', create=1)
-hg.repository(u, 'b', create=1)
-os.chdir('b')
-hg.repository(u, 'd', create=1)
-os.chdir('..')
-hg.repository(u, 'c', create=1)
-os.chdir('..')
+hg.repository(u, b'a', create=1)
+hg.repository(u, b'b', create=1)
+os.chdir(b'b')
+hg.repository(u, b'd', create=1)
+os.chdir(b'..')
+hg.repository(u, b'c', create=1)
+os.chdir(b'..')
-paths = {'t/a/': '%s/a' % webdir,
- 'b': '%s/b' % webdir,
- 'coll': '%s/*' % webdir,
- 'rcoll': '%s/**' % webdir}
+paths = {b't/a/': b'%s/a' % webdir,
+ b'b': b'%s/b' % webdir,
+ b'coll': b'%s/*' % webdir,
+ b'rcoll': b'%s/**' % webdir}
-config = os.path.join(webdir, 'hgwebdir.conf')
-configfile = open(config, 'w')
-configfile.write('[paths]\n')
+config = os.path.join(webdir, b'hgwebdir.conf')
+configfile = open(config, 'wb')
+configfile.write(b'[paths]\n')
for k, v in paths.items():
- configfile.write('%s = %s\n' % (k, v))
+ configfile.write(b'%s = %s\n' % (k, v))
configfile.close()
confwd = hgwebdir(config)
--- a/tests/test-http-api-httpv2.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-http-api-httpv2.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,3 +1,5 @@
+#require no-chg
+
$ . $TESTDIR/wireprotohelpers.sh
$ enabledummycommands
--- a/tests/test-http-api.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-http-api.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,3 +1,5 @@
+#require no-chg
+
$ send() {
> hg --verbose debugwireproto --peer raw http://$LOCALIP:$HGPORT/
> }
--- a/tests/test-http-bad-server.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-http-bad-server.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,4 +1,4 @@
-#require killdaemons serve zstd
+#require serve zstd
Client version is embedded in HTTP request and is effectively dynamic. Pin the
version so behavior is deterministic.
--- a/tests/test-http-branchmap.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-http-branchmap.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,3 @@
-#require killdaemons
-
$ hgserve() {
> hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid \
> -E errors.log -v $@ > startup.log
@@ -60,8 +58,8 @@
verify 7e7d56fe4833 (encoding fallback in branchmap to maintain compatibility with 1.3.x)
$ cat <<EOF > oldhg
- > import sys
- > from mercurial import ui, hg, commands
+ > import threading
+ > from mercurial import dispatch, hg, ui, wireprotoserver
>
> class StdoutWrapper(object):
> def __init__(self, stdout):
@@ -79,12 +77,12 @@
> def __getattr__(self, name):
> return getattr(self._file, name)
>
- > sys.stdout = StdoutWrapper(getattr(sys.stdout, 'buffer', sys.stdout))
- > sys.stderr = StdoutWrapper(getattr(sys.stderr, 'buffer', sys.stderr))
- >
+ > dispatch.initstdio()
> myui = ui.ui.load()
+ > fout = StdoutWrapper(myui.fout)
+ > myui.fout = myui.ferr
> repo = hg.repository(myui, b'a')
- > commands.serve(myui, repo, stdio=True, cmdserver=False)
+ > wireprotoserver._runsshserver(myui, repo, myui.fin, fout, threading.Event())
> EOF
$ echo baz >> b/foo
$ hg -R b ci -m baz
--- a/tests/test-http-permissions.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-http-permissions.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,3 @@
-#require killdaemons
-
$ cat > fakeremoteuser.py << EOF
> import os
> from mercurial.hgweb import hgweb_mod
--- a/tests/test-http-protocol.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-http-protocol.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,3 +1,5 @@
+#require no-chg
+
$ . $TESTDIR/wireprotohelpers.sh
$ cat >> $HGRCPATH << EOF
--- a/tests/test-http.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-http.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,4 +1,4 @@
-#require killdaemons serve
+#require serve
$ hg init test
$ cd test
--- a/tests/test-hybridencode.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-hybridencode.py Wed Jun 06 13:31:24 2018 -0400
@@ -1,471 +1,877 @@
from __future__ import absolute_import, print_function
+
+import unittest
+
from mercurial import (
store,
)
-def show(s):
- # show test input
- print("A = '%s'" % s.encode("string_escape"))
-
- # show the result of the C implementation, if available
- h = store._pathencode(s)
- print("B = '%s'" % h.encode("string_escape"))
+class hybridencodetests(unittest.TestCase):
+ def hybridencode(self, input, want):
- # compare it with reference implementation in Python
- r = store._hybridencode(s, True)
- if h != r:
- print("R = '%s'" % r.encode("string_escape"))
- print()
-
-show("data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&'()+,-.;=[]^`{}")
+ # Check the C implementation if it's in use
+ got = store._pathencode(input)
+ self.assertEqual(want, got)
+ # Check the reference implementation in Python
+ refgot = store._hybridencode(input, True)
+ self.assertEqual(want, refgot)
-print("uppercase char X is encoded as _x")
-show("data/ABCDEFGHIJKLMNOPQRSTUVWXYZ")
-
-print("underbar is doubled")
-show("data/_")
-
-print("tilde is character-encoded")
-show("data/~")
+ def testnoencodingrequired(self):
+ self.hybridencode(
+ b'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}',
+ b'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}')
-print("characters in ASCII code range 1..31")
-show('data/\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f'
- '\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f')
+ def testuppercasechars(self): # uppercase char X is encoded as _x
+ self.hybridencode(
+ b'data/ABCDEFGHIJKLMNOPQRSTUVWXYZ',
+ b'data/_a_b_c_d_e_f_g_h_i_j_k_l_m_n_o_p_q_r_s_t_u_v_w_x_y_z')
+
+ def testunderbar(self): # underbar is doubled
+ self.hybridencode(b'data/_', b'data/__')
-print("characters in ASCII code range 126..255")
-show('data/\x7e\x7f'
- '\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f'
- '\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f')
-show('data/\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf'
- '\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf')
-show('data/\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf'
- '\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf')
-show('data/\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef'
- '\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff')
+ def testtilde(self): # tilde is character-encoded
+ self.hybridencode(b'data/~', b'data/~7e')
-print("Windows reserved characters")
-show('data/less <, greater >, colon :, double-quote ", backslash \\'
- ', pipe |, question-mark ?, asterisk *')
-
-print("encoding directories ending in .hg, .i or .d with '.hg' suffix")
-show('data/x.h.i/x.hg/x.i/x.d/foo')
-show('data/a.hg/a.i/a.d/foo')
-show('data/au.hg/au.i/au.d/foo')
-show('data/aux.hg/aux.i/aux.d/foo')
-show('data/auxy.hg/auxy.i/auxy.d/foo')
+ def testcontrolchars(self): # characters in ASCII code range 1..31
+ self.hybridencode(
+ (b'data/\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f'
+ b'\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e'
+ b'\x1f'),
+ (b'data/~01~02~03~04~05~06~07~08~09~0a~0b~0c~0d~0e~0f~10~11~12~13'
+ b'~14~15~16~17~18~19~1a~1b~1c~1d~1e~1f'))
-print("but these are not encoded on *filenames*")
-show('data/foo/x.hg')
-show('data/foo/x.i')
-show('data/foo/x.d')
-show('data/foo/a.hg')
-show('data/foo/a.i')
-show('data/foo/a.d')
-show('data/foo/au.hg')
-show('data/foo/au.i')
-show('data/foo/au.d')
-show('data/foo/aux.hg')
-show('data/foo/aux.i')
-show('data/foo/aux.d')
-show('data/foo/auxy.hg')
-show('data/foo/auxy.i')
-show('data/foo/auxy.d')
+ def testhighascii(self):# characters in ASCII code range 126..255
+ self.hybridencode(
+ (b'data/~\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c'
+ b'\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b'
+ b'\x9c\x9d\x9e\x9f'),
+ (b'data/~7e~7f~80~81~82~83~84~85~86~87~88~89~8a~8b~8c~8d~8e~8f~90'
+ b'~91~92~93~94~95~96~97~98~99~9a~9b~9c~9d~9e~9f'))
+ self.hybridencode(
+ (b'data/\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad'
+ b'\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc'
+ b'\xbd\xbe\xbf'),
+ (b'data/~a0~a1~a2~a3~a4~a5~a6~a7~a8~a9~aa~ab~ac~ad~ae~af~b0~b1~b2'
+ b'~b3~b4~b5~b6~b7~b8~b9~ba~bb~bc~bd~be~bf'))
+ self.hybridencode(
+ (b'data/\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca'
+ b'\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6'
+ b'\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf'),
+ (b'data/~c0~c1~c2~c3~c4~c5~c6~c7~c8~c9~ca~cb~cc~cd~ce~cf~d0~d1~d2'
+ b'~d3~d4~d5~d6~d7~d8~d9~da~db~dc~dd~de~df'))
+ self.hybridencode(
+ (b'data/\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed'
+ b'\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd'
+ b'\xfe\xff'),
+ (b'data/~e0~e1~e2~e3~e4~e5~e6~e7~e8~e9~ea~eb~ec~ed~ee~ef~f0~f1~f2'
+ b'~f3~f4~f5~f6~f7~f8~f9~fa~fb~fc~fd~fe~ff'))
-print("plain .hg, .i and .d directories have the leading dot encoded")
-show('data/.hg/.i/.d/foo')
-
-show('data/aux.bla/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c.i')
+ def testwinreserved(self): # Windows reserved characters
+ self.hybridencode(
+ (b'data/less <, greater >, colon :, double-quote ", backslash \\, '
+ b'pipe |, question-mark ?, asterisk *'),
+ (b'data/less ~3c, greater ~3e, colon ~3a, double-quote ~22, '
+ b'backslash ~5c, pipe ~7c, question-mark ~3f, asterisk ~2a'))
-show('data/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/'
- 'TENTH/ELEVENTH/LOREMIPSUM.TXT.i')
-show('data/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/'
- 'wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules'
- '.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider.i')
-show('data/AUX.THE-QUICK-BROWN-FOX-JU:MPS-OVER-THE-LAZY-DOG-THE-QUICK-'
- 'BROWN-FOX-JUMPS-OVER-THE-LAZY-DOG.TXT.i')
-show('data/Project Planning/Resources/AnotherLongDirectoryName/'
- 'Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt')
-show('data/Project.Planning/Resources/AnotherLongDirectoryName/'
- 'Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt')
-show('data/foo.../foo / /a./_. /__/.x../ bla/.FOO/something.i')
+ def testhgreserved(self):
+ # encoding directories ending in .hg, .i or .d with '.hg' suffix
+ self.hybridencode(b'data/x.h.i/x.hg/x.i/x.d/foo',
+ b'data/x.h.i.hg/x.hg.hg/x.i.hg/x.d.hg/foo')
+ self.hybridencode(b'data/a.hg/a.i/a.d/foo',
+ b'data/a.hg.hg/a.i.hg/a.d.hg/foo')
+ self.hybridencode(b'data/au.hg/au.i/au.d/foo',
+ b'data/au.hg.hg/au.i.hg/au.d.hg/foo')
+ self.hybridencode(b'data/aux.hg/aux.i/aux.d/foo',
+ b'data/au~78.hg.hg/au~78.i.hg/au~78.d.hg/foo')
+ self.hybridencode(b'data/auxy.hg/auxy.i/auxy.d/foo',
+ b'data/auxy.hg.hg/auxy.i.hg/auxy.d.hg/foo')
+ # but these are not encoded on *filenames*
+ self.hybridencode(b'data/foo/x.hg', b'data/foo/x.hg')
+ self.hybridencode(b'data/foo/x.i', b'data/foo/x.i')
+ self.hybridencode(b'data/foo/x.d', b'data/foo/x.d')
+ self.hybridencode(b'data/foo/a.hg', b'data/foo/a.hg')
+ self.hybridencode(b'data/foo/a.i', b'data/foo/a.i')
+ self.hybridencode(b'data/foo/a.d', b'data/foo/a.d')
+ self.hybridencode(b'data/foo/au.hg', b'data/foo/au.hg')
+ self.hybridencode(b'data/foo/au.i', b'data/foo/au.i')
+ self.hybridencode(b'data/foo/au.d', b'data/foo/au.d')
+ self.hybridencode(b'data/foo/aux.hg', b'data/foo/au~78.hg')
+ self.hybridencode(b'data/foo/aux.i', b'data/foo/au~78.i')
+ self.hybridencode(b'data/foo/aux.d', b'data/foo/au~78.d')
+ self.hybridencode(b'data/foo/auxy.hg', b'data/foo/auxy.hg')
+ self.hybridencode(b'data/foo/auxy.i', b'data/foo/auxy.i')
+ self.hybridencode(b'data/foo/auxy.d', b'data/foo/auxy.d')
-show('data/c/co/com/com0/com1/com2/com3/com4/com5/com6/com7/com8/com9')
-show('data/C/CO/COM/COM0/COM1/COM2/COM3/COM4/COM5/COM6/COM7/COM8/COM9')
-show('data/c.x/co.x/com.x/com0.x/com1.x/com2.x/com3.x/com4.x/com5.x'
- '/com6.x/com7.x/com8.x/com9.x')
-show('data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5'
- '/x.com6/x.com7/x.com8/x.com9')
-show('data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x'
- '/com6x/com7x/com8x/com9x')
-show('data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5'
- '/xcom6/xcom7/xcom8/xcom9')
-
-show('data/l/lp/lpt/lpt0/lpt1/lpt2/lpt3/lpt4/lpt5/lpt6/lpt7/lpt8/lpt9')
-show('data/L/LP/LPT/LPT0/LPT1/LPT2/LPT3/LPT4/LPT5/LPT6/LPT7/LPT8/LPT9')
-show('data/l.x/lp.x/lpt.x/lpt0.x/lpt1.x/lpt2.x/lpt3.x/lpt4.x/lpt5.x'
- '/lpt6.x/lpt7.x/lpt8.x/lpt9.x')
-show('data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5'
- '/x.lpt6/x.lpt7/x.lpt8/x.lpt9')
-show('data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x'
- '/lpt6x/lpt7x/lpt8x/lpt9x')
-show('data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5'
- '/xlpt6/xlpt7/xlpt8/xlpt9')
-
-show('data/con/p/pr/prn/a/au/aux/n/nu/nul')
-show('data/CON/P/PR/PRN/A/AU/AUX/N/NU/NUL')
-show('data/con.x/p.x/pr.x/prn.x/a.x/au.x/aux.x/n.x/nu.x/nul.x')
-show('data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul')
-show('data/conx/px/prx/prnx/ax/aux/auxx/nx/nux/nulx')
-show('data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul')
+ # plain .hg, .i and .d directories have the leading dot encoded
+ self.hybridencode(b'data/.hg/.i/.d/foo',
+ b'data/~2ehg.hg/~2ei.hg/~2ed.hg/foo')
-show('data/a./au./aux./auxy./aux.')
-show('data/c./co./con./cony./con.')
-show('data/p./pr./prn./prny./prn.')
-show('data/n./nu./nul./nuly./nul.')
-show('data/l./lp./lpt./lpt1./lpt1y./lpt1.')
-show('data/lpt9./lpt9y./lpt9.')
-show('data/com./com1./com1y./com1.')
-show('data/com9./com9y./com9.')
-
-show('data/a /au /aux /auxy /aux ')
-
-print("largest unhashed path")
-show('data/123456789-123456789-123456789-123456789-123456789-'
- 'unhashed--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-
-print("shortest hashed path")
-show('data/123456789-123456789-123456789-123456789-123456789-'
- 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
-
-print("changing one char in part that's hashed away produces a different hash")
-show('data/123456789-123456789-123456789-123456789-123456789-'
- 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxy-'
- '123456789-123456')
+ def testmisclongcases(self):
+ self.hybridencode(
+ (b'data/aux.bla/bla.aux/prn/PRN/lpt/com3/nul/'
+ b'coma/foo.NUL/normal.c.i'),
+ (b'data/au~78.bla/bla.aux/pr~6e/_p_r_n/lpt/co~6d3'
+ b'/nu~6c/coma/foo._n_u_l/normal.c.i'))
+ self.hybridencode(
+ (b'data/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH'
+ b'/TENTH/ELEVENTH/LOREMIPSUM.TXT.i'),
+ (b'dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/'
+ b'nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i'))
+ self.hybridencode(
+ (b'data/enterprise/openesbaddons/contrib-imola/corba-bc/'
+ b'netbeansplugin/wsdlExtension/src/main/java/META-INF/services'
+ b'/org.netbeans.modules.xml.wsdl.bindingsupport.spi.'
+ b'ExtensibilityElementTemplateProvider.i'),
+ (b'dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/'
+ b'main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i'))
+ self.hybridencode(
+ (b'data/AUX.THE-QUICK-BROWN-FOX-JU:MPS-OVER-THE-LAZY-DOG-THE-'
+ b'QUICK-BROWN-FOX-JUMPS-OVER-THE-LAZY-DOG.TXT.i'),
+ (b'dh/au~78.the-quick-brown-fox-ju~3amps-over-the-lazy-dog-the-'
+ b'quick-brown-fox-jud4dcadd033000ab2b26eb66bae1906bcb15d4a70.i'))
+ self.hybridencode(
+ (b'data/Project Planning/Resources/AnotherLongDirectoryName/Follow'
+ b'edbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt'),
+ (b'dh/project_/resource/anotherl/followed/andanoth/andthenanextrem'
+ b'elylongfilenaf93030515d9849cfdca52937c2204d19f83913e5.txt'))
+ self.hybridencode(
+ (b'data/Project.Planning/Resources/AnotherLongDirectoryName/Follo'
+ b'wedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt'),
+ (b'dh/project_/resource/anotherl/followed/andanoth/andthenanextre'
+ b'melylongfilena0fd7c506f5c9d58204444fc67e9499006bd2d445.txt'))
+ self.hybridencode(
+ b'data/foo.../foo / /a./_. /__/.x../ bla/.FOO/something.i',
+ (b'data/foo..~2e/foo ~20/~20/a~2e/__.~20/____/~2ex.~2e/~20 bla/'
+ b'~2e_f_o_o/something.i'))
+ self.hybridencode(
+ b'data/c/co/com/com0/com1/com2/com3/com4/com5/com6/com7/com8/com9',
+ (b'data/c/co/com/com0/co~6d1/co~6d2/co~6d3/co~6d4/co~6d5/co~6d6/'
+ b'co~6d7/co~6d8/co~6d9'))
+ self.hybridencode(
+ b'data/C/CO/COM/COM0/COM1/COM2/COM3/COM4/COM5/COM6/COM7/COM8/COM9',
+ (b'data/_c/_c_o/_c_o_m/_c_o_m0/_c_o_m1/_c_o_m2/_c_o_m3/_c_o_m4/'
+ b'_c_o_m5/_c_o_m6/_c_o_m7/_c_o_m8/_c_o_m9'))
+ self.hybridencode(
+ (b'data/c.x/co.x/com.x/com0.x/com1.x/com2.x/com3.x/com4.x/com5.x/'
+ b'com6.x/com7.x/com8.x/com9.x'),
+ (b'data/c.x/co.x/com.x/com0.x/co~6d1.x/co~6d2.x/co~6d3.x/co~6d4.x'
+ b'/co~6d5.x/co~6d6.x/co~6d7.x/co~6d8.x/co~6d9.x'))
+ self.hybridencode(
+ (b'data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5/x.com6'
+ b'/x.com7/x.com8/x.com9'),
+ (b'data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5/x.com6'
+ b'/x.com7/x.com8/x.com9'))
+ self.hybridencode(
+ (b'data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x/com6x/'
+ b'com7x/com8x/com9x'),
+ (b'data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x/com6x/'
+ b'com7x/com8x/com9x'))
+ self.hybridencode(
+ (b'data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5/xcom6/xcom7/'
+ b'xcom8/xcom9'),
+ (b'data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5/xcom6/xcom7/'
+ b'xcom8/xcom9'))
+ self.hybridencode(
+ b'data/l/lp/lpt/lpt0/lpt1/lpt2/lpt3/lpt4/lpt5/lpt6/lpt7/lpt8/lpt9',
+ (b'data/l/lp/lpt/lpt0/lp~741/lp~742/lp~743/lp~744/lp~745/lp~746/'
+ b'lp~747/lp~748/lp~749'))
+ self.hybridencode(
+ b'data/L/LP/LPT/LPT0/LPT1/LPT2/LPT3/LPT4/LPT5/LPT6/LPT7/LPT8/LPT9',
+ (b'data/_l/_l_p/_l_p_t/_l_p_t0/_l_p_t1/_l_p_t2/_l_p_t3/_l_p_t4/'
+ b'_l_p_t5/_l_p_t6/_l_p_t7/_l_p_t8/_l_p_t9'))
+ self.hybridencode(
+ (b'data/l.x/lp.x/lpt.x/lpt0.x/lpt1.x/lpt2.x/lpt3.x/lpt4.x/lpt5.x/'
+ b'lpt6.x/lpt7.x/lpt8.x/lpt9.x'),
+ (b'data/l.x/lp.x/lpt.x/lpt0.x/lp~741.x/lp~742.x/lp~743.x/lp~744.x/'
+ b'lp~745.x/lp~746.x/lp~747.x/lp~748.x/lp~749.x'))
+ self.hybridencode(
+ (b'data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5/'
+ b'x.lpt6/x.lpt7/x.lpt8/x.lpt9'),
+ (b'data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5'
+ b'/x.lpt6/x.lpt7/x.lpt8/x.lpt9'))
+ self.hybridencode(
+ (b'data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x/lpt6x/'
+ b'lpt7x/lpt8x/lpt9x'),
+ (b'data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x/lpt6x/'
+ b'lpt7x/lpt8x/lpt9x'))
+ self.hybridencode(
+ (b'data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5/xlpt6/'
+ b'xlpt7/xlpt8/xlpt9'),
+ (b'data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5/xlpt6/'
+ b'xlpt7/xlpt8/xlpt9'))
+ self.hybridencode(b'data/con/p/pr/prn/a/au/aux/n/nu/nul',
+ b'data/co~6e/p/pr/pr~6e/a/au/au~78/n/nu/nu~6c')
+ self.hybridencode(
+ b'data/CON/P/PR/PRN/A/AU/AUX/N/NU/NUL',
+ b'data/_c_o_n/_p/_p_r/_p_r_n/_a/_a_u/_a_u_x/_n/_n_u/_n_u_l')
+ self.hybridencode(
+ b'data/con.x/p.x/pr.x/prn.x/a.x/au.x/aux.x/n.x/nu.x/nul.x',
+ b'data/co~6e.x/p.x/pr.x/pr~6e.x/a.x/au.x/au~78.x/n.x/nu.x/nu~6c.x')
+ self.hybridencode(
+ b'data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul',
+ b'data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul')
+ self.hybridencode(b'data/conx/px/prx/prnx/ax/aux/auxx/nx/nux/nulx',
+ b'data/conx/px/prx/prnx/ax/au~78/auxx/nx/nux/nulx')
+ self.hybridencode(b'data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul',
+ b'data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul')
+ self.hybridencode(b'data/a./au./aux./auxy./aux.',
+ b'data/a~2e/au~2e/au~78~2e/auxy~2e/au~78~2e')
+ self.hybridencode(b'data/c./co./con./cony./con.',
+ b'data/c~2e/co~2e/co~6e~2e/cony~2e/co~6e~2e')
+ self.hybridencode(b'data/p./pr./prn./prny./prn.',
+ b'data/p~2e/pr~2e/pr~6e~2e/prny~2e/pr~6e~2e')
+ self.hybridencode(b'data/n./nu./nul./nuly./nul.',
+ b'data/n~2e/nu~2e/nu~6c~2e/nuly~2e/nu~6c~2e')
+ self.hybridencode(
+ b'data/l./lp./lpt./lpt1./lpt1y./lpt1.',
+ b'data/l~2e/lp~2e/lpt~2e/lp~741~2e/lpt1y~2e/lp~741~2e')
+ self.hybridencode(b'data/lpt9./lpt9y./lpt9.',
+ b'data/lp~749~2e/lpt9y~2e/lp~749~2e')
+ self.hybridencode(b'data/com./com1./com1y./com1.',
+ b'data/com~2e/co~6d1~2e/com1y~2e/co~6d1~2e')
+ self.hybridencode(b'data/com9./com9y./com9.',
+ b'data/co~6d9~2e/com9y~2e/co~6d9~2e')
+ self.hybridencode(b'data/a /au /aux /auxy /aux ',
+ b'data/a~20/au~20/aux~20/auxy~20/aux~20')
-print("uppercase hitting length limit due to encoding")
-show('data/A23456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-show('data/Z23456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-
-print("compare with lowercase not hitting limit")
-show('data/a23456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-show('data/z23456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-
-print("not hitting limit with any of these")
-show("data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&'()+,-.;="
- "[]^`{}xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-"
- "123456789-12345")
-
-print("underbar hitting length limit due to encoding")
-show('data/_23456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-
-print("tilde hitting length limit due to encoding")
-show('data/~23456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
+ def testhashingboundarycases(self):
+ # largest unhashed path
+ self.hybridencode(
+ (b'data/123456789-123456789-123456789-123456789-123456789-unhashed'
+ b'--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'),
+ (b'data/123456789-123456789-123456789-123456789-123456789-unhashed'
+ b'--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'))
+ # shortest hashed path
+ self.hybridencode(
+ (b'data/123456789-123456789-123456789-123456789-123456789-hashed'
+ b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/123456789-123456789-123456789-123456789-123456789-hashed---'
+ b'-xxxxxxxxx-xxxxxxxe9c55002b50bf5181e7a6fc1f60b126e2a6fcf71'))
-print("Windows reserved characters hitting length limit")
-show('data/<23456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-show('data/>23456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-show('data/:23456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-show('data/"23456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-show('data/\\23456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-show('data/|23456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-show('data/?23456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-show('data/*23456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
+ def testhashing(self):
+ # changing one char in part that's hashed away produces a different hash
+ self.hybridencode(
+ (b'data/123456789-123456789-123456789-123456789-123456789-hashed'
+ b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxy-123456789-123456'),
+ (b'dh/123456789-123456789-123456789-123456789-123456789-hashed---'
+ b'-xxxxxxxxx-xxxxxxxd24fa4455faf8a94350c18e5eace7c2bb17af706'))
+ # uppercase hitting length limit due to encoding
+ self.hybridencode(
+ (b'data/A23456789-123456789-123456789-123456789-123456789-'
+ b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+ b'123456789-12345'),
+ (b'dh/a23456789-123456789-123456789-123456789-123456789-'
+ b'xxxxxxxxx-xxxxxxxxx-xxxxxxx'
+ b'cbbc657029b41b94ed510d05feb6716a5c03bc6b'))
+ self.hybridencode(
+ (b'data/Z23456789-123456789-123456789-123456789-123456789-'
+ b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+ b'123456789-12345'),
+ (b'dh/z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx'
+ b'-xxxxxxxxx-xxxxxxx938f32a725c89512833fb96b6602dd9ebff51ddd'))
+ # compare with lowercase not hitting limit
+ self.hybridencode(
+ (b'data/a23456789-123456789-123456789-123456789-123456789-'
+ b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
+ b'12345'),
+ (b'data/a23456789-123456789-123456789-123456789-123456789-'
+ b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
+ b'12345'))
+ self.hybridencode(
+ (b'data/z23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789'
+ b'-12345'),
+ (b'data/z23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
+ b'12345'))
+ # not hitting limit with any of these
+ self.hybridencode(
+ (b'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}'
+ b'xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'),
+ (b'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}'
+ b'xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'))
+ # underbar hitting length limit due to encoding
+ self.hybridencode(
+ (b'data/_23456789-123456789-123456789-123456789-123456789-'
+ b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
+ b'12345'),
+ (b'dh/_23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-'
+ b'xxxxxxxxx-xxxxxxx9921a01af50feeabc060ce00eee4cba6efc31d2b'))
+
+ # tilde hitting length limit due to encoding
+ self.hybridencode(
+ (b'data/~23456789-123456789-123456789-123456789-123456789-'
+ b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
+ b'12345'),
+ (b'dh/~7e23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'9cec6f97d569c10995f785720044ea2e4227481b'))
-print("initial space hitting length limit")
-show('data/ 23456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-
-print("initial dot hitting length limit")
-show('data/.23456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-
-print("trailing space in filename hitting length limit")
-show('data/123456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-1234 ')
+ def testwinreservedoverlimit(self):
+ # Windows reserved characters hitting length limit
+ self.hybridencode(
+ (b'data/<23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+ b'-123456789-12345'),
+ (b'dh/~3c23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxee'
+ b'67d8f275876ca1ef2500fc542e63c885c4e62d'))
+ self.hybridencode(
+ (b'data/>23456789-123456789-123456789-123456789-123456789-'
+ b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+ b'123456789-12345'),
+ (b'dh/~3e23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'387a85a5b1547cc9136310c974df716818458ddb'))
+ self.hybridencode(
+ (b'data/:23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+ b'123456789-12345'),
+ (b'dh/~3a23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'2e4154fb571d13d22399c58cc4ef4858e4b75999'))
+ self.hybridencode(
+ (b'data/"23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+ b'-123456789-12345'),
+ (b'dh/~2223456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'fc7e3ec7b0687ee06ed8c32fef0eb0c1980259f5'))
+ self.hybridencode(
+ (b'data/\\23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+ b'123456789-12345'),
+ (b'dh/~5c23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'944e1f2b7110687e116e0d151328ac648b06ab4a'))
+ self.hybridencode(
+ (b'data/|23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+ b'-123456789-12345'),
+ (b'dh/~7c23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'28b23dd3fd0242946334126ab62bcd772aac32f4'))
+ self.hybridencode(
+ (b'data/?23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+ b'-123456789-12345'),
+ (b'dh/~3f23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'a263022d3994d2143d98f94f431eef8b5e7e0f8a'))
+ self.hybridencode(
+ (b'data/*23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+ b'123456789-12345'),
+ (b'dh/~2a23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'0e7e6020e3c00ba7bb7893d84ca2966fbf53e140'))
-print("trailing dot in filename hitting length limit")
-show('data/123456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-1234.')
+ def testinitialspacelenlimit(self):
+ # initial space hitting length limit
+ self.hybridencode(
+ (b'data/ 23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+ b'123456789-12345'),
+ (b'dh/~2023456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'92acbc78ef8c0b796111629a02601f07d8aec4ea'))
-print("initial space in directory hitting length limit")
-show('data/ x/456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
+ def testinitialdotlenlimit(self):
+ # initial dot hitting length limit
+ self.hybridencode(
+ (b'data/.23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+ b'-123456789-12345'),
+ (b'dh/~2e23456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'dbe19cc6505b3515ab9228cebf877ad07075168f'))
+
+ def testtrailingspacelenlimit(self):
+ # trailing space in filename hitting length limit
+ self.hybridencode(
+ (b'data/123456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+ b'123456789-1234 '),
+ (b'dh/123456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxx'
+ b'0025dc73e04f97426db4893e3bf67d581dc6d066'))
-print("initial dot in directory hitting length limit")
-show('data/.x/456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
+ def testtrailingdotlenlimit(self):
+ # trailing dot in filename hitting length limit
+ self.hybridencode(
+ (b'data/123456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
+ b'1234.'),
+ (b'dh/123456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxx'
+ b'85a16cf03ee7feba8a5abc626f1ba9886d01e89d'))
+
+ def testinitialspacedirlenlimit(self):
+ # initial space in directory hitting length limit
+ self.hybridencode(
+ (b'data/ x/456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+ b'-123456789-12345'),
+ (b'dh/~20x/456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'1b3a3b712b2ac00d6af14ae8b4c14fdbf904f516'))
-print("trailing space in directory hitting length limit")
-show('data/x /456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
+ def testinitialdotdirlenlimit(self):
+ # initial dot in directory hitting length limit
+ self.hybridencode(
+ (b'data/.x/456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+ b'-123456789-12345'),
+ (b'dh/~2ex/456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'39dbc4c193a5643a8936fc69c3363cd7ac91ab14'))
+
+ def testtrailspacedirlenlimit(self):
+ # trailing space in directory hitting length limit
+ self.hybridencode(
+ (b'data/x /456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+ b'-123456789-12345'),
+ (b'dh/x~20/456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'2253c341df0b5290790ad312cd8499850f2273e5'))
-print("trailing dot in directory hitting length limit")
-show('data/x./456789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
+ def testtrailingdotdirlenlimit(self):
+ # trailing dot in directory hitting length limit
+ self.hybridencode(
+ (b'data/x./456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+ b'123456789-12345'),
+ (b'dh/x~2e/456789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'cc0324d696d34562b44b5138db08ee1594ccc583'))
-print("with directories that need direncoding, hitting length limit")
-show('data/x.i/56789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-show('data/x.d/56789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-show('data/x.hg/5789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
+ def testdirencodinglenlimit(self):
+ # with directories that need direncoding, hitting length limit
+ self.hybridencode(
+ (b'data/x.i/56789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
+ b'12345'),
+ (b'dh/x.i.hg/56789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxx'
+ b'a4c4399bdf81c67dbbbb7060aa0124d8dea94f74'))
+ self.hybridencode(
+ (b'data/x.d/56789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+ b'-123456789-12345'),
+ (b'dh/x.d.hg/56789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxx'
+ b'1303fa90473b230615f5b3ea7b660e881ae5270a'))
+ self.hybridencode(
+ (b'data/x.hg/5789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+ b'-123456789-12345'),
+ (b'dh/x.hg.hg/5789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxx'
+ b'26d724a8af68e7a4e4455e6602ea9adbd0eb801f'))
-print("Windows reserved filenames, hitting length limit")
-show('data/con/56789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-show('data/prn/56789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-show('data/aux/56789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-show('data/nul/56789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-show('data/com1/6789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-show('data/com9/6789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-show('data/lpt1/6789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-show('data/lpt9/6789-123456789-123456789-123456789-123456789-'
- 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
+ def testwinreservedfilenameslimit(self):
+ # Windows reserved filenames, hitting length limit
+ self.hybridencode(
+ (b'data/con/56789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+ b'123456789-12345'),
+ (b'dh/co~6e/56789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'c0794d4f4c605a2617900eb2563d7113cf6ea7d3'))
+ self.hybridencode(
+ (b'data/prn/56789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+ b'-123456789-12345'),
+ (b'dh/pr~6e/56789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'64db876e1a9730e27236cb9b167aff942240e932'))
+ self.hybridencode(
+ (b'data/aux/56789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+ b'-123456789-12345'),
+ (b'dh/au~78/56789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'8a178558405ca6fb4bbd75446dfa186f06751a0d'))
+ self.hybridencode(
+ (b'data/nul/56789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+ b'-123456789-12345'),
+ (b'dh/nu~6c/56789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'c5e51b6fec1bd07bd243b053a0c3f7209855b886'))
+ self.hybridencode(
+ (b'data/com1/6789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+ b'-123456789-12345'),
+ (b'dh/co~6d1/6789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'32f5f44ece3bb62b9327369ca84cc19c86259fcd'))
+ self.hybridencode(
+ (b'data/com9/6789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+ b'-123456789-12345'),
+ (b'dh/co~6d9/6789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'734360b28c66a3230f55849fe8926206d229f990'))
+ self.hybridencode(
+ (b'data/lpt1/6789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+ b'-123456789-12345'),
+ (b'dh/lp~741/6789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'e6f16ab4b6b0637676b2842b3345c9836df46ef7'))
+ self.hybridencode(
+ (b'data/lpt9/6789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+ b'-123456789-12345'),
+ (b'dh/lp~749/6789-123456789-123456789-123456789-123456789'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+ b'a475814c51acead3e44f2ff801f0c4903f986157'))
-print("non-reserved names, just not hitting limit")
-show('data/123456789-123456789-123456789-123456789-123456789-'
- '/com/com0/lpt/lpt0/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12345')
-
-print("hashed path with largest untruncated 1st dir")
-show('data/12345678/-123456789-123456789-123456789-123456789-'
- 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
-
-print("hashed path with smallest truncated 1st dir")
-show('data/123456789/123456789-123456789-123456789-123456789-'
- 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testnonreservednolimit(self):
+ # non-reserved names, just not hitting limit
+ self.hybridencode(
+ (b'data/123456789-123456789-123456789-123456789-123456789-'
+ b'/com/com0/lpt/lpt0/'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'),
+ (b'data/123456789-123456789-123456789-123456789-123456789-'
+ b'/com/com0/lpt/lpt0/'
+ b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'))
-print("hashed path with largest untruncated two dirs")
-show('data/12345678/12345678/9-123456789-123456789-123456789-'
- 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testhashedpathuntrucfirst(self):
+ # hashed path with largest untruncated 1st dir
+ self.hybridencode(
+ (b'data/12345678/-123456789-123456789-123456789-123456789-hashed'
+ b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/-123456789-123456789-123456789-123456789-hashed'
+ b'----xxxxxxxxx-xxxxxxx4e9e9e384d00929a93b6835fbf976eb32321ff3c'))
-print("hashed path with smallest truncated two dirs")
-show('data/123456789/123456789/123456789-123456789-123456789-'
- 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testhashedpathsmallesttrucdir(self):
+ # hashed path with smallest truncated 1st dir
+ self.hybridencode(
+ (b'data/123456789/123456789-123456789-123456789-123456789-hashed'
+ b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/123456789-123456789-123456789-123456789-hashed'
+ b'----xxxxxxxxx-xxxxxxxx1f4e4ec5f2be76e109bfaa8e31c062fe426d5490'))
-print("hashed path with largest untruncated three dirs")
-show('data/12345678/12345678/12345678/89-123456789-123456789-'
- 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testhashedlargesttwountruc(self):
+ # hashed path with largest untruncated two dirs
+ self.hybridencode(
+ (b'data/12345678/12345678/9-123456789-123456789-123456789-hashed'
+ b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/12345678/9-123456789-123456789-123456789-hashed'
+ b'----xxxxxxxxx-xxxxxxx3332d8329d969cf835542a9f2cbcfb385b6cf39d'))
-print("hashed path with smallest truncated three dirs")
-show('data/123456789/123456789/123456789/123456789-123456789-'
- 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testhashedpathsmallesttrunctwodirs(self):
+ # hashed path with smallest truncated two dirs
+ self.hybridencode(
+ (b'data/123456789/123456789/123456789-123456789-123456789-hashed'
+ b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/12345678/123456789-123456789-123456789-hashed'
+ b'----xxxxxxxxx-xxxxxxxxx'
+ b'9699559798247dffa18717138859be5f8874840e'))
-print("hashed path with largest untruncated four dirs")
-show('data/12345678/12345678/12345678/12345678/789-123456789-'
- 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testhashuntruncthree(self):
+ # hashed path with largest untruncated three dirs
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/89-123456789-123456789-'
+ b'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+ b'123456789-123456'),
+ (b'dh/12345678/12345678/12345678/89-123456789-123456789-hashed'
+ b'----xxxxxxxxx-xxxxxxxf0a2b053bb1369cce02f78c217d6a7aaea18c439'))
-print("hashed path with smallest truncated four dirs")
-show('data/123456789/123456789/123456789/123456789/123456789-'
- 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testhashtruncthree(self):
+ # hashed path with smallest truncated three dirs
+ self.hybridencode(
+ (b'data/123456789/123456789/123456789/123456789-123456789-hashed'
+ b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/12345678/12345678/123456789-123456789-hashed'
+ b'----xxxxxxxxx-xxxxxxxxx-'
+ b'1c6f8284967384ec13985a046d3553179d9d03cd'))
-print("hashed path with largest untruncated five dirs")
-show('data/12345678/12345678/12345678/12345678/12345678/6789-'
- 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testhashuntrucfour(self):
+ # hashed path with largest untruncated four dirs
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/789-123456789-hashed'
+ b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/789-123456789-hashed'
+ b'----xxxxxxxxx-xxxxxxx0d30c99049d8f0ff97b94d4ef302027e8d54c6fd'))
-print("hashed path with smallest truncated five dirs")
-show('data/123456789/123456789/123456789/123456789/123456789/'
- 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testhashtruncfour(self):
+ # hashed path with smallest truncated four dirs
+ self.hybridencode(
+ (b'data/123456789/123456789/123456789/123456789/123456789-hashed'
+ b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/123456789-hashed'
+ b'----xxxxxxxxx-xxxxxxxxx-x'
+ b'46162779e1a771810b37a737f82ae7ed33771402'))
-print("hashed path with largest untruncated six dirs")
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/ed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testhashuntruncfive(self):
+ # hashed path with largest untruncated five dirs
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/6789-hashed'
+ b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/6789-hashed'
+ b'----xxxxxxxxx-xxxxxxxbfe752ddc8b003c2790c66a9f2eb1ea75c114390'))
-print("hashed path with smallest truncated six dirs")
-show('data/123456789/123456789/123456789/123456789/123456789/'
- '123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testhashtruncfive(self):
+ # hashed path with smallest truncated five dirs
+ self.hybridencode(
+ (b'data/123456789/123456789/123456789/123456789/123456789/hashed'
+ b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/hashed'
+ b'----xxxxxxxxx-xxxxxxxxx-xx'
+ b'b94c27b3532fa880cdd572b1c514785cab7b6ff2'))
+
+ def testhashuntruncsix(self):
+ # hashed path with largest untruncated six dirs
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+ b'ed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/'
+ b'ed----xxxxxxxxx-xxxxxxx'
+ b'cd8cc5483a0f3be409e0e5d4bf9e36e113c59235'))
-print("hashed path with largest untruncated seven dirs")
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/xxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testhashtruncsix(self):
+ # hashed path with smallest truncated six dirs
+ self.hybridencode(
+ (b'data/123456789/123456789/123456789/123456789/123456789/'
+ b'123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+ b'123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/'
+ b'xxxxxxxxx-xxxxxxxxx-xxx'
+ b'47dd6f616f833a142da00701b334cebbf640da06'))
-print("hashed path with smallest truncated seven dirs")
-show('data/123456789/123456789/123456789/123456789/123456789/'
- '123456789/123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testhashuntrunc7(self):
+ # hashed path with largest untruncated seven dirs
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/xxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+ b'123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/xxxxxx-xxxxxxx'
+ b'1c8ed635229fc22efe51035feeadeb4c8a0ecb82'))
-print("hashed path with largest untruncated eight dirs")
-print("(directory 8 is dropped because it hits _maxshortdirslen)")
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/12345678/xxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testhashtrunc7(self):
+ # hashed path with smallest truncated seven dirs
+ self.hybridencode(
+ (b'data/123456789/123456789/123456789/123456789/123456789/'
+ b'123456789/123456789/'
+ b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/123'
+ b'45678/xxxxxxxxx-xxxx298ff7d33f8ce6db57930837ffea2fb2f48bb926'))
-print("hashed path with smallest truncated eight dirs")
-print("(directory 8 is dropped because it hits _maxshortdirslen)")
-show('data/123456789/123456789/123456789/123456789/123456789/'
- '123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testhashuntrunc8(self):
+ # hashed path with largest untruncated eight dirs
+ # (directory 8 is dropped because it hits _maxshortdirslen)
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+ b'12345678/12345678/xxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1'
+ b'2345678/xxxxxxx-xxxxxxc8996ccd41b471f768057181a4d59d2febe7277d'))
-print("hashed path with largest non-dropped directory 8")
-print("(just not hitting the _maxshortdirslen boundary)")
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testhashtrunc8(self):
+ # hashed path with smallest truncated eight dirs
+ # (directory 8 is dropped because it hits _maxshortdirslen)
+ self.hybridencode(
+ (b'data/123456789/123456789/123456789/123456789/123456789/'
+ b'123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-'
+ b'123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/xxxxxxxxx-xxxx'
+ b'4fa04a839a6bda93e1c21c713f2edcbd16e8890d'))
-print("...adding one truncated char to dir 1..7 won't drop dir 8")
-show('data/12345678x/12345678/12345678/12345678/12345678/12345'
- '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
-show('data/12345678/12345678x/12345678/12345678/12345678/12345'
- '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
-show('data/12345678/12345678/12345678x/12345678/12345678/12345'
- '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
-show('data/12345678/12345678/12345678/12345678x/12345678/12345'
- '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
-show('data/12345678/12345678/12345678/12345678/12345678x/12345'
- '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678x/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678x/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testhashnondropped8(self):
+ # hashed path with largest non-dropped directory 8
+ # (just not hitting the _maxshortdirslen boundary)
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789'
+ b'-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/12345/-xxxxxxx'
+ b'4d43d1ccaa20efbfe99ec779dc063611536ff2c5'))
+ # ...adding one truncated char to dir 1..7 won't drop dir 8
+ self.hybridencode(
+ (b'data/12345678x/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1234'
+ b'5678/12345/xxxxxxxx0f9efce65189cc60fd90fe4ffd49d7b58bbe0f2e'))
+ self.hybridencode(
+ (b'data/12345678/12345678x/12345678/12345678/12345678/12345678'
+ b'/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1234'
+ b'5678/12345/xxxxxxxx945ca395708cafdd54a94501859beabd3e243921'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678x/12345678/12345678/12345678/12'
+ b'345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1234'
+ b'5678/12345/xxxxxxxxac62bf6898c4fd0502146074547c11caa751a327'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678x/12345678/12345678/12'
+ b'345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1234'
+ b'5678/12345/xxxxxxxx2ae5a2baed7983fae8974d0ca06c6bf08b9aee92'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678x/12345678/'
+ b'12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1234'
+ b'5678/12345/xxxxxxxx214aba07b6687532a43d1e9eaf6e88cfca96b68c'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678x'
+ b'/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1234'
+ b'5678/12345/xxxxxxxxe7a022ae82f0f55cf4e0498e55ba59ea4ebb55bf'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+ b'12345678x/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345'
+ b'678/12345/xxxxxxxxb51ce61164996a80f36ce3cfe64b62d519aedae3'))
-print("hashed path with shortest dropped directory 8")
-print("(just hitting the _maxshortdirslen boundary)")
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/123456/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
-
-print("hashed path that drops dir 8 due to dot or space at end is")
-print("encoded, and thus causing to hit _maxshortdirslen")
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/1234./-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/1234 /-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testhashedpathshortestdropped8(self):
+ # hashed path with shortest dropped directory 8
+ # (just hitting the _maxshortdirslen boundary)
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/123456/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+ b'123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/xxxxxxxxx-xxxx'
+ b'11fa9873cc6c3215eae864528b5530a04efc6cfe'))
-print("... with dir 8 short enough for encoding")
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/12./xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/12 /xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-123456')
+ def testhashedpathdropsdir8fortrailingdotspace(self):
+ # hashed path that drops dir 8 due to dot or space at end is
+ # encoded, and thus causing to hit _maxshortdirslen
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/1234./-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+ b'123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/-xxxxxxxxx-xxx'
+ b'602df9b45bec564e2e1f0645d5140dddcc76ed58'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/1234 /-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+ b'123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/-xxxxxxxxx-xxx'
+ b'd99ff212bc84b4d1f70cd6b0071e3ef69d4e12ce'))
+ # ... with dir 8 short enough for encoding
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/12./xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+ b'-123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/12~2e/'
+ b'xx-xxxxx7baeb5ed7f14a586ee1cacecdbcbff70032d1b3c'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/12 '
+ b'/xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/12~20/'
+ b'xx-xxxxxcf79ca9795f77d7f75745da36807e5d772bd5182'))
-print('''Extensions are replicated on hashed paths. Note that
-we only get to encode files that end in .i or .d inside the
-store. Encoded filenames are thus bound in length.''')
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12.345.i')
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12.345.d')
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12.3456.i')
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12.34567.i')
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12.345678.i')
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12.3456789.i')
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12.3456789-.i')
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12.3456789-1.i')
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12.3456789-12.i')
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12.3456789-123.i')
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12.3456789-1234.i')
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12.3456789-12345.i')
-show('data/12345678/12345678/12345678/12345678/12345678/12345'
- '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
- '123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWX'
- 'YZ-abcdefghjiklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPRSTU'
- 'VWXYZ-1234567890-xxxxxxxxx-xxxxxxxxx-xxxxxxxx-xxxx'
- 'xxxxx-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwww'
- 'wwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww.i')
+ def testextensionsreplicatedonhashedpaths(self):
+ # Extensions are replicated on hashed paths. Note that
+ # we only get to encode files that end in .i or .d inside the
+ # store. Encoded filenames are thus bound in length.
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+ b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+ b'45.i'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+ b'345678/12345/-xxxxxc10ad03b5755ed524f5286aab1815dfe07729438.i'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+ b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+ b'45.d'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+ b'345678/12345/-xxxxx9eec83381f2b39ef5ac8b4ecdf2c94f7983f57c8.d'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+ b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+ b'456.i'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+ b'345678/12345/-xxxxxb7796dc7d175cfb0bb8a7728f58f6ebec9042568.i'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+ b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+ b'4567.i'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+ b'345678/12345/-xxxxxb515857a6bfeef017c4894d8df42458ac65d55b8.i'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+ b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+ b'45678.i'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+ b'345678/12345/-xxxxxb05a0f247bc0a776211cd6a32ab714fd9cc09f2b.i'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+ b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+ b'456789.i'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+ b'345678/12345/-xxxxxf192b48bff08d9e0e12035fb52bc58c70de72c94.i'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+ b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+ b'456789-.i'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+ b'345678/12345/-xxxxx435551e0ed4c7b083b9ba83cee916670e02e80ad.i'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+ b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+ b'456789-1.i'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+ b'345678/12345/-xxxxxa7f74eb98d8d58b716356dfd26e2f9aaa65d6a9a.i'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+ b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+ b'456789-12.i'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+ b'345678/12345/-xxxxxed68d9bd43b931f0b100267fee488d65a0c66f62.i'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+ b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+ b'456789-123.i'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+ b'345678/12345/-xxxxx5cea44de2b642d2ba2b4a30693ffb1049644d698.i'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+ b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+ b'456789-1234.i'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+ b'345678/12345/-xxxxx68462f62a7f230b39c1b5400d73ec35920990b7e.i'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+ b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+ b'456789-12345.i'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+ b'345678/12345/-xxxxx4cb852a314c6da240a83eec94761cdd71c6ec22e.i'))
+ self.hybridencode(
+ (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+ b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+ b'456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-'
+ b'abcdefghjiklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPRSTUVWXYZ'
+ b'-1234567890-xxxxxxxxx-xxxxxxxxx-xxxxxxxx'
+ b'-xxxxxxxxx-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww'
+ b'-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww.i'),
+ (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+ b'345678/12345/-xxxxx93352aa50377751d9e5ebdf52da1e6e69a6887a6.i'))
-print("paths outside data/ can be encoded")
-show('metadata/dir/00manifest.i')
-show('metadata/12345678/12345678/12345678/12345678/12345678/'
- '12345678/12345678/12345678/12345678/12345678/12345678/'
- '12345678/12345678/00manifest.i')
+ def testpathsoutsidedata(self):
+ # paths outside data/ can be encoded
+ self.hybridencode(b'metadata/dir/00manifest.i',
+ b'metadata/dir/00manifest.i')
+ self.hybridencode(
+ (b'metadata/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/12345678/00manifest.i'),
+ (b'dh/ata/12345678/12345678/12345678/12345678/12345678'
+ b'/12345678/12345678/00manife'
+ b'0a4da1f89aa2aa9eb0896eb451288419049781b4.i'))
+
+if __name__ == '__main__':
+ import silenttestrunner
+ silenttestrunner.main(__name__)
--- a/tests/test-hybridencode.py.out Wed Jun 06 13:28:49 2018 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,500 +0,0 @@
-A = 'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}'
-B = 'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}'
-
-uppercase char X is encoded as _x
-A = 'data/ABCDEFGHIJKLMNOPQRSTUVWXYZ'
-B = 'data/_a_b_c_d_e_f_g_h_i_j_k_l_m_n_o_p_q_r_s_t_u_v_w_x_y_z'
-
-underbar is doubled
-A = 'data/_'
-B = 'data/__'
-
-tilde is character-encoded
-A = 'data/~'
-B = 'data/~7e'
-
-characters in ASCII code range 1..31
-A = 'data/\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f'
-B = 'data/~01~02~03~04~05~06~07~08~09~0a~0b~0c~0d~0e~0f~10~11~12~13~14~15~16~17~18~19~1a~1b~1c~1d~1e~1f'
-
-characters in ASCII code range 126..255
-A = 'data/~\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f'
-B = 'data/~7e~7f~80~81~82~83~84~85~86~87~88~89~8a~8b~8c~8d~8e~8f~90~91~92~93~94~95~96~97~98~99~9a~9b~9c~9d~9e~9f'
-
-A = 'data/\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf'
-B = 'data/~a0~a1~a2~a3~a4~a5~a6~a7~a8~a9~aa~ab~ac~ad~ae~af~b0~b1~b2~b3~b4~b5~b6~b7~b8~b9~ba~bb~bc~bd~be~bf'
-
-A = 'data/\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf'
-B = 'data/~c0~c1~c2~c3~c4~c5~c6~c7~c8~c9~ca~cb~cc~cd~ce~cf~d0~d1~d2~d3~d4~d5~d6~d7~d8~d9~da~db~dc~dd~de~df'
-
-A = 'data/\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff'
-B = 'data/~e0~e1~e2~e3~e4~e5~e6~e7~e8~e9~ea~eb~ec~ed~ee~ef~f0~f1~f2~f3~f4~f5~f6~f7~f8~f9~fa~fb~fc~fd~fe~ff'
-
-Windows reserved characters
-A = 'data/less <, greater >, colon :, double-quote ", backslash \\, pipe |, question-mark ?, asterisk *'
-B = 'data/less ~3c, greater ~3e, colon ~3a, double-quote ~22, backslash ~5c, pipe ~7c, question-mark ~3f, asterisk ~2a'
-
-encoding directories ending in .hg, .i or .d with '.hg' suffix
-A = 'data/x.h.i/x.hg/x.i/x.d/foo'
-B = 'data/x.h.i.hg/x.hg.hg/x.i.hg/x.d.hg/foo'
-
-A = 'data/a.hg/a.i/a.d/foo'
-B = 'data/a.hg.hg/a.i.hg/a.d.hg/foo'
-
-A = 'data/au.hg/au.i/au.d/foo'
-B = 'data/au.hg.hg/au.i.hg/au.d.hg/foo'
-
-A = 'data/aux.hg/aux.i/aux.d/foo'
-B = 'data/au~78.hg.hg/au~78.i.hg/au~78.d.hg/foo'
-
-A = 'data/auxy.hg/auxy.i/auxy.d/foo'
-B = 'data/auxy.hg.hg/auxy.i.hg/auxy.d.hg/foo'
-
-but these are not encoded on *filenames*
-A = 'data/foo/x.hg'
-B = 'data/foo/x.hg'
-
-A = 'data/foo/x.i'
-B = 'data/foo/x.i'
-
-A = 'data/foo/x.d'
-B = 'data/foo/x.d'
-
-A = 'data/foo/a.hg'
-B = 'data/foo/a.hg'
-
-A = 'data/foo/a.i'
-B = 'data/foo/a.i'
-
-A = 'data/foo/a.d'
-B = 'data/foo/a.d'
-
-A = 'data/foo/au.hg'
-B = 'data/foo/au.hg'
-
-A = 'data/foo/au.i'
-B = 'data/foo/au.i'
-
-A = 'data/foo/au.d'
-B = 'data/foo/au.d'
-
-A = 'data/foo/aux.hg'
-B = 'data/foo/au~78.hg'
-
-A = 'data/foo/aux.i'
-B = 'data/foo/au~78.i'
-
-A = 'data/foo/aux.d'
-B = 'data/foo/au~78.d'
-
-A = 'data/foo/auxy.hg'
-B = 'data/foo/auxy.hg'
-
-A = 'data/foo/auxy.i'
-B = 'data/foo/auxy.i'
-
-A = 'data/foo/auxy.d'
-B = 'data/foo/auxy.d'
-
-plain .hg, .i and .d directories have the leading dot encoded
-A = 'data/.hg/.i/.d/foo'
-B = 'data/~2ehg.hg/~2ei.hg/~2ed.hg/foo'
-
-A = 'data/aux.bla/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c.i'
-B = 'data/au~78.bla/bla.aux/pr~6e/_p_r_n/lpt/co~6d3/nu~6c/coma/foo._n_u_l/normal.c.i'
-
-A = 'data/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT.i'
-B = 'dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i'
-
-A = 'data/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider.i'
-B = 'dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i'
-
-A = 'data/AUX.THE-QUICK-BROWN-FOX-JU:MPS-OVER-THE-LAZY-DOG-THE-QUICK-BROWN-FOX-JUMPS-OVER-THE-LAZY-DOG.TXT.i'
-B = 'dh/au~78.the-quick-brown-fox-ju~3amps-over-the-lazy-dog-the-quick-brown-fox-jud4dcadd033000ab2b26eb66bae1906bcb15d4a70.i'
-
-A = 'data/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt'
-B = 'dh/project_/resource/anotherl/followed/andanoth/andthenanextremelylongfilenaf93030515d9849cfdca52937c2204d19f83913e5.txt'
-
-A = 'data/Project.Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt'
-B = 'dh/project_/resource/anotherl/followed/andanoth/andthenanextremelylongfilena0fd7c506f5c9d58204444fc67e9499006bd2d445.txt'
-
-A = 'data/foo.../foo / /a./_. /__/.x../ bla/.FOO/something.i'
-B = 'data/foo..~2e/foo ~20/~20/a~2e/__.~20/____/~2ex.~2e/~20 bla/~2e_f_o_o/something.i'
-
-A = 'data/c/co/com/com0/com1/com2/com3/com4/com5/com6/com7/com8/com9'
-B = 'data/c/co/com/com0/co~6d1/co~6d2/co~6d3/co~6d4/co~6d5/co~6d6/co~6d7/co~6d8/co~6d9'
-
-A = 'data/C/CO/COM/COM0/COM1/COM2/COM3/COM4/COM5/COM6/COM7/COM8/COM9'
-B = 'data/_c/_c_o/_c_o_m/_c_o_m0/_c_o_m1/_c_o_m2/_c_o_m3/_c_o_m4/_c_o_m5/_c_o_m6/_c_o_m7/_c_o_m8/_c_o_m9'
-
-A = 'data/c.x/co.x/com.x/com0.x/com1.x/com2.x/com3.x/com4.x/com5.x/com6.x/com7.x/com8.x/com9.x'
-B = 'data/c.x/co.x/com.x/com0.x/co~6d1.x/co~6d2.x/co~6d3.x/co~6d4.x/co~6d5.x/co~6d6.x/co~6d7.x/co~6d8.x/co~6d9.x'
-
-A = 'data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5/x.com6/x.com7/x.com8/x.com9'
-B = 'data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5/x.com6/x.com7/x.com8/x.com9'
-
-A = 'data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x/com6x/com7x/com8x/com9x'
-B = 'data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x/com6x/com7x/com8x/com9x'
-
-A = 'data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5/xcom6/xcom7/xcom8/xcom9'
-B = 'data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5/xcom6/xcom7/xcom8/xcom9'
-
-A = 'data/l/lp/lpt/lpt0/lpt1/lpt2/lpt3/lpt4/lpt5/lpt6/lpt7/lpt8/lpt9'
-B = 'data/l/lp/lpt/lpt0/lp~741/lp~742/lp~743/lp~744/lp~745/lp~746/lp~747/lp~748/lp~749'
-
-A = 'data/L/LP/LPT/LPT0/LPT1/LPT2/LPT3/LPT4/LPT5/LPT6/LPT7/LPT8/LPT9'
-B = 'data/_l/_l_p/_l_p_t/_l_p_t0/_l_p_t1/_l_p_t2/_l_p_t3/_l_p_t4/_l_p_t5/_l_p_t6/_l_p_t7/_l_p_t8/_l_p_t9'
-
-A = 'data/l.x/lp.x/lpt.x/lpt0.x/lpt1.x/lpt2.x/lpt3.x/lpt4.x/lpt5.x/lpt6.x/lpt7.x/lpt8.x/lpt9.x'
-B = 'data/l.x/lp.x/lpt.x/lpt0.x/lp~741.x/lp~742.x/lp~743.x/lp~744.x/lp~745.x/lp~746.x/lp~747.x/lp~748.x/lp~749.x'
-
-A = 'data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5/x.lpt6/x.lpt7/x.lpt8/x.lpt9'
-B = 'data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5/x.lpt6/x.lpt7/x.lpt8/x.lpt9'
-
-A = 'data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x/lpt6x/lpt7x/lpt8x/lpt9x'
-B = 'data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x/lpt6x/lpt7x/lpt8x/lpt9x'
-
-A = 'data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5/xlpt6/xlpt7/xlpt8/xlpt9'
-B = 'data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5/xlpt6/xlpt7/xlpt8/xlpt9'
-
-A = 'data/con/p/pr/prn/a/au/aux/n/nu/nul'
-B = 'data/co~6e/p/pr/pr~6e/a/au/au~78/n/nu/nu~6c'
-
-A = 'data/CON/P/PR/PRN/A/AU/AUX/N/NU/NUL'
-B = 'data/_c_o_n/_p/_p_r/_p_r_n/_a/_a_u/_a_u_x/_n/_n_u/_n_u_l'
-
-A = 'data/con.x/p.x/pr.x/prn.x/a.x/au.x/aux.x/n.x/nu.x/nul.x'
-B = 'data/co~6e.x/p.x/pr.x/pr~6e.x/a.x/au.x/au~78.x/n.x/nu.x/nu~6c.x'
-
-A = 'data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul'
-B = 'data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul'
-
-A = 'data/conx/px/prx/prnx/ax/aux/auxx/nx/nux/nulx'
-B = 'data/conx/px/prx/prnx/ax/au~78/auxx/nx/nux/nulx'
-
-A = 'data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul'
-B = 'data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul'
-
-A = 'data/a./au./aux./auxy./aux.'
-B = 'data/a~2e/au~2e/au~78~2e/auxy~2e/au~78~2e'
-
-A = 'data/c./co./con./cony./con.'
-B = 'data/c~2e/co~2e/co~6e~2e/cony~2e/co~6e~2e'
-
-A = 'data/p./pr./prn./prny./prn.'
-B = 'data/p~2e/pr~2e/pr~6e~2e/prny~2e/pr~6e~2e'
-
-A = 'data/n./nu./nul./nuly./nul.'
-B = 'data/n~2e/nu~2e/nu~6c~2e/nuly~2e/nu~6c~2e'
-
-A = 'data/l./lp./lpt./lpt1./lpt1y./lpt1.'
-B = 'data/l~2e/lp~2e/lpt~2e/lp~741~2e/lpt1y~2e/lp~741~2e'
-
-A = 'data/lpt9./lpt9y./lpt9.'
-B = 'data/lp~749~2e/lpt9y~2e/lp~749~2e'
-
-A = 'data/com./com1./com1y./com1.'
-B = 'data/com~2e/co~6d1~2e/com1y~2e/co~6d1~2e'
-
-A = 'data/com9./com9y./com9.'
-B = 'data/co~6d9~2e/com9y~2e/co~6d9~2e'
-
-A = 'data/a /au /aux /auxy /aux '
-B = 'data/a~20/au~20/aux~20/auxy~20/aux~20'
-
-largest unhashed path
-A = 'data/123456789-123456789-123456789-123456789-123456789-unhashed--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'data/123456789-123456789-123456789-123456789-123456789-unhashed--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-
-shortest hashed path
-A = 'data/123456789-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/123456789-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxe9c55002b50bf5181e7a6fc1f60b126e2a6fcf71'
-
-changing one char in part that's hashed away produces a different hash
-A = 'data/123456789-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxy-123456789-123456'
-B = 'dh/123456789-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxd24fa4455faf8a94350c18e5eace7c2bb17af706'
-
-uppercase hitting length limit due to encoding
-A = 'data/A23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxcbbc657029b41b94ed510d05feb6716a5c03bc6b'
-
-A = 'data/Z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxx938f32a725c89512833fb96b6602dd9ebff51ddd'
-
-compare with lowercase not hitting limit
-A = 'data/a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'data/a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-
-A = 'data/z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'data/z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-
-not hitting limit with any of these
-A = 'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-
-underbar hitting length limit due to encoding
-A = 'data/_23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/_23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxx9921a01af50feeabc060ce00eee4cba6efc31d2b'
-
-tilde hitting length limit due to encoding
-A = 'data/~23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/~7e23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx9cec6f97d569c10995f785720044ea2e4227481b'
-
-Windows reserved characters hitting length limit
-A = 'data/<23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/~3c23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxee67d8f275876ca1ef2500fc542e63c885c4e62d'
-
-A = 'data/>23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/~3e23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx387a85a5b1547cc9136310c974df716818458ddb'
-
-A = 'data/:23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/~3a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx2e4154fb571d13d22399c58cc4ef4858e4b75999'
-
-A = 'data/"23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/~2223456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxfc7e3ec7b0687ee06ed8c32fef0eb0c1980259f5'
-
-A = 'data/\\23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/~5c23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx944e1f2b7110687e116e0d151328ac648b06ab4a'
-
-A = 'data/|23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/~7c23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx28b23dd3fd0242946334126ab62bcd772aac32f4'
-
-A = 'data/?23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/~3f23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxa263022d3994d2143d98f94f431eef8b5e7e0f8a'
-
-A = 'data/*23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/~2a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx0e7e6020e3c00ba7bb7893d84ca2966fbf53e140'
-
-initial space hitting length limit
-A = 'data/ 23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/~2023456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx92acbc78ef8c0b796111629a02601f07d8aec4ea'
-
-initial dot hitting length limit
-A = 'data/.23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/~2e23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxdbe19cc6505b3515ab9228cebf877ad07075168f'
-
-trailing space in filename hitting length limit
-A = 'data/123456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-1234 '
-B = 'dh/123456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxx0025dc73e04f97426db4893e3bf67d581dc6d066'
-
-trailing dot in filename hitting length limit
-A = 'data/123456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-1234.'
-B = 'dh/123456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxx85a16cf03ee7feba8a5abc626f1ba9886d01e89d'
-
-initial space in directory hitting length limit
-A = 'data/ x/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/~20x/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx1b3a3b712b2ac00d6af14ae8b4c14fdbf904f516'
-
-initial dot in directory hitting length limit
-A = 'data/.x/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/~2ex/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx39dbc4c193a5643a8936fc69c3363cd7ac91ab14'
-
-trailing space in directory hitting length limit
-A = 'data/x /456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/x~20/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx2253c341df0b5290790ad312cd8499850f2273e5'
-
-trailing dot in directory hitting length limit
-A = 'data/x./456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/x~2e/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxcc0324d696d34562b44b5138db08ee1594ccc583'
-
-with directories that need direncoding, hitting length limit
-A = 'data/x.i/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/x.i.hg/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxa4c4399bdf81c67dbbbb7060aa0124d8dea94f74'
-
-A = 'data/x.d/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/x.d.hg/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxx1303fa90473b230615f5b3ea7b660e881ae5270a'
-
-A = 'data/x.hg/5789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/x.hg.hg/5789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxx26d724a8af68e7a4e4455e6602ea9adbd0eb801f'
-
-Windows reserved filenames, hitting length limit
-A = 'data/con/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/co~6e/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxc0794d4f4c605a2617900eb2563d7113cf6ea7d3'
-
-A = 'data/prn/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/pr~6e/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx64db876e1a9730e27236cb9b167aff942240e932'
-
-A = 'data/aux/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/au~78/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx8a178558405ca6fb4bbd75446dfa186f06751a0d'
-
-A = 'data/nul/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/nu~6c/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxc5e51b6fec1bd07bd243b053a0c3f7209855b886'
-
-A = 'data/com1/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/co~6d1/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx32f5f44ece3bb62b9327369ca84cc19c86259fcd'
-
-A = 'data/com9/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/co~6d9/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx734360b28c66a3230f55849fe8926206d229f990'
-
-A = 'data/lpt1/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/lp~741/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxe6f16ab4b6b0637676b2842b3345c9836df46ef7'
-
-A = 'data/lpt9/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'dh/lp~749/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxa475814c51acead3e44f2ff801f0c4903f986157'
-
-non-reserved names, just not hitting limit
-A = 'data/123456789-123456789-123456789-123456789-123456789-/com/com0/lpt/lpt0/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-B = 'data/123456789-123456789-123456789-123456789-123456789-/com/com0/lpt/lpt0/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
-
-hashed path with largest untruncated 1st dir
-A = 'data/12345678/-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxx4e9e9e384d00929a93b6835fbf976eb32321ff3c'
-
-hashed path with smallest truncated 1st dir
-A = 'data/123456789/123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxx1f4e4ec5f2be76e109bfaa8e31c062fe426d5490'
-
-hashed path with largest untruncated two dirs
-A = 'data/12345678/12345678/9-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/9-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxx3332d8329d969cf835542a9f2cbcfb385b6cf39d'
-
-hashed path with smallest truncated two dirs
-A = 'data/123456789/123456789/123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx9699559798247dffa18717138859be5f8874840e'
-
-hashed path with largest untruncated three dirs
-A = 'data/12345678/12345678/12345678/89-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/89-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxf0a2b053bb1369cce02f78c217d6a7aaea18c439'
-
-hashed path with smallest truncated three dirs
-A = 'data/123456789/123456789/123456789/123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-1c6f8284967384ec13985a046d3553179d9d03cd'
-
-hashed path with largest untruncated four dirs
-A = 'data/12345678/12345678/12345678/12345678/789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/789-123456789-hashed----xxxxxxxxx-xxxxxxx0d30c99049d8f0ff97b94d4ef302027e8d54c6fd'
-
-hashed path with smallest truncated four dirs
-A = 'data/123456789/123456789/123456789/123456789/123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/123456789-hashed----xxxxxxxxx-xxxxxxxxx-x46162779e1a771810b37a737f82ae7ed33771402'
-
-hashed path with largest untruncated five dirs
-A = 'data/12345678/12345678/12345678/12345678/12345678/6789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/6789-hashed----xxxxxxxxx-xxxxxxxbfe752ddc8b003c2790c66a9f2eb1ea75c114390'
-
-hashed path with smallest truncated five dirs
-A = 'data/123456789/123456789/123456789/123456789/123456789/hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/hashed----xxxxxxxxx-xxxxxxxxx-xxb94c27b3532fa880cdd572b1c514785cab7b6ff2'
-
-hashed path with largest untruncated six dirs
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/ed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/ed----xxxxxxxxx-xxxxxxxcd8cc5483a0f3be409e0e5d4bf9e36e113c59235'
-
-hashed path with smallest truncated six dirs
-A = 'data/123456789/123456789/123456789/123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxxxx-xxxxxxxxx-xxx47dd6f616f833a142da00701b334cebbf640da06'
-
-hashed path with largest untruncated seven dirs
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxx-xxxxxxx1c8ed635229fc22efe51035feeadeb4c8a0ecb82'
-
-hashed path with smallest truncated seven dirs
-A = 'data/123456789/123456789/123456789/123456789/123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxxxx-xxxx298ff7d33f8ce6db57930837ffea2fb2f48bb926'
-
-hashed path with largest untruncated eight dirs
-(directory 8 is dropped because it hits _maxshortdirslen)
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxx-xxxxxxc8996ccd41b471f768057181a4d59d2febe7277d'
-
-hashed path with smallest truncated eight dirs
-(directory 8 is dropped because it hits _maxshortdirslen)
-A = 'data/123456789/123456789/123456789/123456789/123456789/123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxxxx-xxxx4fa04a839a6bda93e1c21c713f2edcbd16e8890d'
-
-hashed path with largest non-dropped directory 8
-(just not hitting the _maxshortdirslen boundary)
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxx4d43d1ccaa20efbfe99ec779dc063611536ff2c5'
-
-...adding one truncated char to dir 1..7 won't drop dir 8
-A = 'data/12345678x/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxx0f9efce65189cc60fd90fe4ffd49d7b58bbe0f2e'
-
-A = 'data/12345678/12345678x/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxx945ca395708cafdd54a94501859beabd3e243921'
-
-A = 'data/12345678/12345678/12345678x/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxac62bf6898c4fd0502146074547c11caa751a327'
-
-A = 'data/12345678/12345678/12345678/12345678x/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxx2ae5a2baed7983fae8974d0ca06c6bf08b9aee92'
-
-A = 'data/12345678/12345678/12345678/12345678/12345678x/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxx214aba07b6687532a43d1e9eaf6e88cfca96b68c'
-
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678x/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxe7a022ae82f0f55cf4e0498e55ba59ea4ebb55bf'
-
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678x/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxb51ce61164996a80f36ce3cfe64b62d519aedae3'
-
-hashed path with shortest dropped directory 8
-(just hitting the _maxshortdirslen boundary)
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/123456/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxxxx-xxxx11fa9873cc6c3215eae864528b5530a04efc6cfe'
-
-hashed path that drops dir 8 due to dot or space at end is
-encoded, and thus causing to hit _maxshortdirslen
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/1234./-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/-xxxxxxxxx-xxx602df9b45bec564e2e1f0645d5140dddcc76ed58'
-
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/1234 /-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/-xxxxxxxxx-xxxd99ff212bc84b4d1f70cd6b0071e3ef69d4e12ce'
-
-... with dir 8 short enough for encoding
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12./xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12~2e/xx-xxxxx7baeb5ed7f14a586ee1cacecdbcbff70032d1b3c'
-
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12 /xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12~20/xx-xxxxxcf79ca9795f77d7f75745da36807e5d772bd5182'
-
-Extensions are replicated on hashed paths. Note that
-we only get to encode files that end in .i or .d inside the
-store. Encoded filenames are thus bound in length.
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.345.i'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxc10ad03b5755ed524f5286aab1815dfe07729438.i'
-
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.345.d'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxx9eec83381f2b39ef5ac8b4ecdf2c94f7983f57c8.d'
-
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456.i'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxb7796dc7d175cfb0bb8a7728f58f6ebec9042568.i'
-
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.34567.i'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxb515857a6bfeef017c4894d8df42458ac65d55b8.i'
-
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.345678.i'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxb05a0f247bc0a776211cd6a32ab714fd9cc09f2b.i'
-
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789.i'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxf192b48bff08d9e0e12035fb52bc58c70de72c94.i'
-
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-.i'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxx435551e0ed4c7b083b9ba83cee916670e02e80ad.i'
-
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-1.i'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxa7f74eb98d8d58b716356dfd26e2f9aaa65d6a9a.i'
-
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12.i'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxed68d9bd43b931f0b100267fee488d65a0c66f62.i'
-
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-123.i'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxx5cea44de2b642d2ba2b4a30693ffb1049644d698.i'
-
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-1234.i'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxx68462f62a7f230b39c1b5400d73ec35920990b7e.i'
-
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345.i'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxx4cb852a314c6da240a83eec94761cdd71c6ec22e.i'
-
-A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPRSTUVWXYZ-1234567890-xxxxxxxxx-xxxxxxxxx-xxxxxxxx-xxxxxxxxx-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww.i'
-B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxx93352aa50377751d9e5ebdf52da1e6e69a6887a6.i'
-
-paths outside data/ can be encoded
-A = 'metadata/dir/00manifest.i'
-B = 'metadata/dir/00manifest.i'
-
-A = 'metadata/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345678/00manifest.i'
-B = 'dh/ata/12345678/12345678/12345678/12345678/12345678/12345678/12345678/00manife0a4da1f89aa2aa9eb0896eb451288419049781b4.i'
-
--- a/tests/test-import-git.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-import-git.t Wed Jun 06 13:31:24 2018 -0400
@@ -566,7 +566,7 @@
>>> fp = open('binary.diff', 'rb')
>>> data = fp.read()
>>> fp.close()
- >>> open('binary.diff', 'wb').write(data.replace(b'\n', b'\r\n'))
+ >>> open('binary.diff', 'wb').write(data.replace(b'\n', b'\r\n')) and None
$ rm binary2
$ hg import --no-commit binary.diff
applying binary.diff
--- a/tests/test-import-merge.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-import-merge.t Wed Jun 06 13:31:24 2018 -0400
@@ -143,7 +143,7 @@
$ hg export 2 | head -7 > ../a.patch
$ hg export tip > out
>>> apatch = open("../a.patch", "ab")
- >>> apatch.write("".join(open("out").readlines()[7:]))
+ >>> apatch.write(b"".join(open("out", 'rb').readlines()[7:])) and None
$ cd ..
$ hg clone -qr0 repo3 repo3-clone
--- a/tests/test-known.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-known.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,3 @@
-#require killdaemons
-
= Test the known() protocol function =
Create a test repository:
--- a/tests/test-largefiles-wireproto.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-largefiles-wireproto.t Wed Jun 06 13:31:24 2018 -0400
@@ -26,7 +26,7 @@
> patterns=glob:**.dat
> usercache=${USERCACHE}
> [web]
- > allow_archive = zip
+ > allow-archive = zip
> [hooks]
> precommit=sh -c "echo \\"Invoking status precommit hook\\"; hg status"
> EOF
--- a/tests/test-lfs-largefiles.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-lfs-largefiles.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,4 +1,4 @@
-#require no-reposimplestore
+#require no-reposimplestore no-chg
This tests the interaction between the largefiles and lfs extensions, and
conversion from largefiles -> lfs.
--- a/tests/test-lfs-pointer.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-lfs-pointer.py Wed Jun 06 13:31:24 2018 -0400
@@ -6,6 +6,10 @@
# make it runnable using python directly without run-tests.py
sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
+# Import something from Mercurial, so the module loader gets initialized.
+from mercurial import pycompat
+del pycompat # unused for now
+
from hgext.lfs import pointer
def tryparse(text):
@@ -14,28 +18,28 @@
r = pointer.deserialize(text)
print('ok')
except Exception as ex:
- print(ex)
+ print((b'%s' % ex).decode('ascii'))
if r:
text2 = r.serialize()
if text2 != text:
print('reconstructed text differs')
return r
-t = ('version https://git-lfs.github.com/spec/v1\n'
- 'oid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1'
- '258daaa5e2ca24d17e2393\n'
- 'size 12345\n'
- 'x-foo extra-information\n')
+t = (b'version https://git-lfs.github.com/spec/v1\n'
+ b'oid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1'
+ b'258daaa5e2ca24d17e2393\n'
+ b'size 12345\n'
+ b'x-foo extra-information\n')
-tryparse('')
+tryparse(b'')
tryparse(t)
-tryparse(t.replace('git-lfs', 'unknown'))
-tryparse(t.replace('v1\n', 'v1\n\n'))
-tryparse(t.replace('sha256', 'ahs256'))
-tryparse(t.replace('sha256:', ''))
-tryparse(t.replace('12345', '0x12345'))
-tryparse(t.replace('extra-information', 'extra\0information'))
-tryparse(t.replace('extra-information', 'extra\ninformation'))
-tryparse(t.replace('x-foo', 'x_foo'))
-tryparse(t.replace('oid', 'blobid'))
-tryparse(t.replace('size', 'size-bytes').replace('oid', 'object-id'))
+tryparse(t.replace(b'git-lfs', b'unknown'))
+tryparse(t.replace(b'v1\n', b'v1\n\n'))
+tryparse(t.replace(b'sha256', b'ahs256'))
+tryparse(t.replace(b'sha256:', b''))
+tryparse(t.replace(b'12345', b'0x12345'))
+tryparse(t.replace(b'extra-information', b'extra\0information'))
+tryparse(t.replace(b'extra-information', b'extra\ninformation'))
+tryparse(t.replace(b'x-foo', b'x_foo'))
+tryparse(t.replace(b'oid', b'blobid'))
+tryparse(t.replace(b'size', b'size-bytes').replace(b'oid', b'object-id'))
--- a/tests/test-lfs-pointer.py.out Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-lfs-pointer.py.out Wed Jun 06 13:31:24 2018 -0400
@@ -1,12 +1,12 @@
-missed keys: oid, size
+missing lfs pointer keys: oid, size
ok
-unexpected value: version='https://unknown.github.com/spec/v1'
+unexpected lfs pointer value: version='https://unknown.github.com/spec/v1'
cannot parse git-lfs text: 'version https://git-lfs.github.com/spec/v1\n\noid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393\nsize 12345\nx-foo extra-information\n'
-unexpected value: oid='ahs256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393'
-unexpected value: oid='4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393'
-unexpected value: size='0x12345'
+unexpected lfs pointer value: oid='ahs256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393'
+unexpected lfs pointer value: oid='4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393'
+unexpected lfs pointer value: size='0x12345'
ok
cannot parse git-lfs text: 'version https://git-lfs.github.com/spec/v1\noid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393\nsize 12345\nx-foo extra\ninformation\n'
-unexpected key: x_foo
-missed keys: oid
-missed keys: oid, size
+unexpected lfs pointer key: x_foo
+missing lfs pointer keys: oid
+missing lfs pointer keys: oid, size
--- a/tests/test-lfs-serve-access.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-lfs-serve-access.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,4 +1,4 @@
-#require serve no-reposimplestore
+#require serve no-reposimplestore no-chg
$ cat >> $HGRCPATH <<EOF
> [extensions]
--- a/tests/test-lfs-serve.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-lfs-serve.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,5 @@
#testcases lfsremote-on lfsremote-off
-#require serve no-reposimplestore
+#require serve no-reposimplestore no-chg
This test splits `hg serve` with and without using the extension into separate
tests cases. The tests are broken down as follows, where "LFS"/"No-LFS"
--- a/tests/test-lfs-test-server.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-lfs-test-server.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,4 +1,4 @@
-#require no-reposimplestore
+#require no-reposimplestore no-chg
#testcases git-server hg-server
#if git-server
@@ -880,7 +880,6 @@
$LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
#endif
- $ rm $DAEMON_PIDS
$ mkdir $TESTTMP/lfs-server2
$ cd $TESTTMP/lfs-server2
#if no-windows git-server
--- a/tests/test-lfs.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-lfs.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,4 +1,4 @@
-#require no-reposimplestore
+#require no-reposimplestore no-chg
# Initial setup
--- a/tests/test-log.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-log.t Wed Jun 06 13:31:24 2018 -0400
@@ -1943,7 +1943,7 @@
test that parent prevent a changeset to be hidden
$ hg up 1 -q --hidden
- updating to a hidden changeset a765632148dc
+ updated to hidden changeset a765632148dc
(hidden revision 'a765632148dc' is pruned)
$ hg log --template='{rev}:{node}\n'
1:a765632148dc55d38c35c4f247c618701886cb2f
--- a/tests/test-logexchange.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-logexchange.t Wed Jun 06 13:31:24 2018 -0400
@@ -2,10 +2,10 @@
=============================================
$ cat >> $HGRCPATH << EOF
+ > [ui]
+ > ssh = $PYTHON "$TESTDIR/dummyssh"
> [alias]
> glog = log -G -T '{rev}:{node|short} {desc}'
- > [experimental]
- > remotenames = True
> [extensions]
> remotenames =
> show =
@@ -52,7 +52,13 @@
$ cd ..
- $ hg clone server client
+ $ hg clone ssh://user@dummy/server client
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 9 changesets with 9 changes to 9 files (+1 heads)
+ new changesets 18d04c59bb5d:3e1487808078
updating to branch default
8 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-merge-tools.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-merge-tools.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,7 +1,8 @@
test merge-tools configuration - mostly exercising filemerge.py
$ unset HGMERGE # make sure HGMERGE doesn't interfere with the test
- $ hg init
+ $ hg init repo
+ $ cd repo
revision 0
@@ -328,6 +329,183 @@
# hg resolve --list
R f
+executable set to python script that succeeds:
+
+ $ cat > "$TESTTMP/myworkingmerge.py" <<EOF
+ > def myworkingmergefn(ui, repo, args, **kwargs):
+ > return False
+ > EOF
+ $ beforemerge
+ [merge-tools]
+ false.whatever=
+ true.priority=1
+ true.executable=cat
+ # hg update -C 1
+ $ hg merge -r 2 --config merge-tools.true.executable="python:$TESTTMP/myworkingmerge.py:myworkingmergefn"
+ merging f
+ 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ aftermerge
+ # cat f
+ revision 1
+ space
+ # hg stat
+ M f
+ # hg resolve --list
+ R f
+
+executable set to python script that fails:
+
+ $ cat > "$TESTTMP/mybrokenmerge.py" <<EOF
+ > def mybrokenmergefn(ui, repo, args, **kwargs):
+ > ui.write(b"some fail message\n")
+ > return True
+ > EOF
+ $ beforemerge
+ [merge-tools]
+ false.whatever=
+ true.priority=1
+ true.executable=cat
+ # hg update -C 1
+ $ hg merge -r 2 --config merge-tools.true.executable="python:$TESTTMP/mybrokenmerge.py:mybrokenmergefn"
+ merging f
+ some fail message
+ abort: $TESTTMP/mybrokenmerge.py hook failed
+ [255]
+ $ aftermerge
+ # cat f
+ revision 1
+ space
+ # hg stat
+ ? f.orig
+ # hg resolve --list
+ U f
+
+executable set to python script that is missing function:
+
+ $ beforemerge
+ [merge-tools]
+ false.whatever=
+ true.priority=1
+ true.executable=cat
+ # hg update -C 1
+ $ hg merge -r 2 --config merge-tools.true.executable="python:$TESTTMP/myworkingmerge.py:missingFunction"
+ merging f
+ abort: $TESTTMP/myworkingmerge.py does not have function: missingFunction
+ [255]
+ $ aftermerge
+ # cat f
+ revision 1
+ space
+ # hg stat
+ ? f.orig
+ # hg resolve --list
+ U f
+
+executable set to missing python script:
+
+ $ beforemerge
+ [merge-tools]
+ false.whatever=
+ true.priority=1
+ true.executable=cat
+ # hg update -C 1
+ $ hg merge -r 2 --config merge-tools.true.executable="python:$TESTTMP/missingpythonscript.py:mergefn"
+ merging f
+ abort: loading python merge script failed: $TESTTMP/missingpythonscript.py
+ [255]
+ $ aftermerge
+ # cat f
+ revision 1
+ space
+ # hg stat
+ ? f.orig
+ # hg resolve --list
+ U f
+
+executable set to python script but callable function is missing:
+
+ $ beforemerge
+ [merge-tools]
+ false.whatever=
+ true.priority=1
+ true.executable=cat
+ # hg update -C 1
+ $ hg merge -r 2 --config merge-tools.true.executable="python:$TESTTMP/myworkingmerge.py"
+ abort: invalid 'python:' syntax: python:$TESTTMP/myworkingmerge.py
+ [255]
+ $ aftermerge
+ # cat f
+ revision 1
+ space
+ # hg stat
+ # hg resolve --list
+ U f
+
+executable set to python script but callable function is empty string:
+
+ $ beforemerge
+ [merge-tools]
+ false.whatever=
+ true.priority=1
+ true.executable=cat
+ # hg update -C 1
+ $ hg merge -r 2 --config merge-tools.true.executable="python:$TESTTMP/myworkingmerge.py:"
+ abort: invalid 'python:' syntax: python:$TESTTMP/myworkingmerge.py:
+ [255]
+ $ aftermerge
+ # cat f
+ revision 1
+ space
+ # hg stat
+ # hg resolve --list
+ U f
+
+executable set to python script but callable function is missing and path contains colon:
+
+ $ beforemerge
+ [merge-tools]
+ false.whatever=
+ true.priority=1
+ true.executable=cat
+ # hg update -C 1
+ $ hg merge -r 2 --config merge-tools.true.executable="python:$TESTTMP/some:dir/myworkingmerge.py"
+ abort: invalid 'python:' syntax: python:$TESTTMP/some:dir/myworkingmerge.py
+ [255]
+ $ aftermerge
+ # cat f
+ revision 1
+ space
+ # hg stat
+ # hg resolve --list
+ U f
+
+executable set to python script filename that contains spaces:
+
+ $ mkdir -p "$TESTTMP/my path"
+ $ cat > "$TESTTMP/my path/my working merge with spaces in filename.py" <<EOF
+ > def myworkingmergefn(ui, repo, args, **kwargs):
+ > return False
+ > EOF
+ $ beforemerge
+ [merge-tools]
+ false.whatever=
+ true.priority=1
+ true.executable=cat
+ # hg update -C 1
+ $ hg merge -r 2 --config "merge-tools.true.executable=python:$TESTTMP/my path/my working merge with spaces in filename.py:myworkingmergefn"
+ merging f
+ 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ aftermerge
+ # cat f
+ revision 1
+ space
+ # hg stat
+ M f
+ # hg resolve --list
+ R f
+
#if unix-permissions
environment variables in true.executable are handled:
@@ -1446,7 +1624,7 @@
merging f
labellocal: "working copy"
labelother: "merge rev"
- output (arg): "$TESTTMP/f"
+ output (arg): "$TESTTMP/repo/f"
output (contents):
<<<<<<< working copy: uitmpl 1
revision 1
@@ -1485,7 +1663,7 @@
merging f
labellocal: "working copy: tooltmpl ef83787e2614"
labelother: "merge rev: tooltmpl 0185f4e0cf02"
- output (arg): "$TESTTMP/f"
+ output (arg): "$TESTTMP/repo/f"
output (contents):
<<<<<<< working copy: tooltmpl ef83787e2614
revision 1
@@ -1585,7 +1763,7 @@
$ hg update -q -C 2
$ hg merge -y -r tip --tool echo --config merge-tools.echo.args='$base $local $other $output'
merging f and f.txt to f.txt
- */f~base.* */f~local.*.txt */f~other.*.txt $TESTTMP/f.txt (glob)
+ */f~base.* */f~local.*.txt */f~other.*.txt $TESTTMP/repo/f.txt (glob)
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -1600,7 +1778,7 @@
> --config merge-tools.echo.args='$base $local $other $output' \
> --config experimental.mergetempdirprefix=$TESTTMP/hgmerge.
merging f and f.txt to f.txt
- $TESTTMP/hgmerge.*/f~base $TESTTMP/hgmerge.*/f~local.txt $TESTTMP/hgmerge.*/f~other.txt $TESTTMP/f.txt (glob)
+ $TESTTMP/hgmerge.*/f~base $TESTTMP/hgmerge.*/f~local.txt $TESTTMP/hgmerge.*/f~other.txt $TESTTMP/repo/f.txt (glob)
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -1668,3 +1846,5 @@
couldn't find merge tool true (for pattern f)
couldn't find merge tool true
f = false
+
+ $ cd ..
--- a/tests/test-merge6.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-merge6.t Wed Jun 06 13:31:24 2018 -0400
@@ -42,6 +42,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
new changesets b90e70beeb58
+ 1 changesets became public
(run 'hg heads' to see heads, 'hg merge' to merge)
$ hg merge
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-minifileset.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-minifileset.py Wed Jun 06 13:31:24 2018 -0400
@@ -18,21 +18,24 @@
if f(*args):
print('unexpected: %r should exclude %r' % (text, args))
-check('all()', [('a.php', 123), ('b.txt', 0)], [])
-check('none()', [], [('a.php', 123), ('b.txt', 0)])
-check('!!!!((!(!!all())))', [], [('a.php', 123), ('b.txt', 0)])
+check(b'all()', [(b'a.php', 123), (b'b.txt', 0)], [])
+check(b'none()', [], [(b'a.php', 123), (b'b.txt', 0)])
+check(b'!!!!((!(!!all())))', [], [(b'a.php', 123), (b'b.txt', 0)])
-check('"path:a" & (**.b | **.c)', [('a/b.b', 0), ('a/c.c', 0)], [('b/c.c', 0)])
-check('(path:a & **.b) | **.c',
- [('a/b.b', 0), ('a/c.c', 0), ('b/c.c', 0)], [])
+check(b'"path:a" & (**.b | **.c)',
+ [(b'a/b.b', 0), (b'a/c.c', 0)], [(b'b/c.c', 0)])
+check(b'(path:a & **.b) | **.c',
+ [(b'a/b.b', 0), (b'a/c.c', 0), (b'b/c.c', 0)], [])
-check('**.bin - size("<20B")', [('b.bin', 21)], [('a.bin', 11), ('b.txt', 21)])
+check(b'**.bin - size("<20B")',
+ [(b'b.bin', 21)], [(b'a.bin', 11), (b'b.txt', 21)])
-check('!!**.bin or size(">20B") + "path:bin" or !size(">10")',
- [('a.bin', 11), ('b.txt', 21), ('bin/abc', 11)],
- [('a.notbin', 11), ('b.txt', 11), ('bin2/abc', 11)])
+check(b'!!**.bin or size(">20B") + "path:bin" or !size(">10")',
+ [(b'a.bin', 11), (b'b.txt', 21), (b'bin/abc', 11)],
+ [(b'a.notbin', 11), (b'b.txt', 11), (b'bin2/abc', 11)])
-check('(**.php and size(">10KB")) | **.zip | ("path:bin" & !"path:bin/README") '
- ' | size(">1M")',
- [('a.php', 15000), ('a.zip', 0), ('bin/a', 0), ('bin/README', 1e7)],
- [('a.php', 5000), ('b.zip2', 0), ('t/bin/a', 0), ('bin/README', 1)])
+check(
+ b'(**.php and size(">10KB")) | **.zip | ("path:bin" & !"path:bin/README") '
+ b' | size(">1M")',
+ [(b'a.php', 15000), (b'a.zip', 0), (b'bin/a', 0), (b'bin/README', 1e7)],
+ [(b'a.php', 5000), (b'b.zip2', 0), (b't/bin/a', 0), (b'bin/README', 1)])
--- a/tests/test-minirst.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-minirst.py Wed Jun 06 13:31:24 2018 -0400
@@ -1,11 +1,13 @@
from __future__ import absolute_import, print_function
-import pprint
from mercurial import (
minirst,
)
+from mercurial.utils import (
+ stringutil,
+)
def debugformat(text, form, **kwargs):
- if form == 'html':
+ if form == b'html':
print("html format:")
out = minirst.format(text, style=form, **kwargs)
else:
@@ -14,11 +16,11 @@
print("-" * 70)
if type(out) == tuple:
- print(out[0][:-1])
+ print(out[0][:-1].decode('utf8'))
print("-" * 70)
- pprint.pprint(out[1])
+ print(stringutil.pprint(out[1]).decode('utf8'))
else:
- print(out[:-1])
+ print(out[:-1].decode('utf8'))
print("-" * 70)
print()
@@ -26,7 +28,7 @@
print("== %s ==" % title)
debugformat(text, 60, **kwargs)
debugformat(text, 30, **kwargs)
- debugformat(text, 'html', **kwargs)
+ debugformat(text, b'html', **kwargs)
paragraphs = b"""
This is some text in the first paragraph.
@@ -37,7 +39,7 @@
\n \n \nThe third and final paragraph.
"""
-debugformats(b'paragraphs', paragraphs)
+debugformats('paragraphs', paragraphs)
definitions = b"""
A Term
@@ -52,7 +54,7 @@
Definition.
"""
-debugformats(b'definitions', definitions)
+debugformats('definitions', definitions)
literals = br"""
The fully minimized form is the most
@@ -76,7 +78,7 @@
with '::' disappears in the final output.
"""
-debugformats(b'literals', literals)
+debugformats('literals', literals)
lists = b"""
- This is the first list item.
@@ -127,7 +129,7 @@
* This is the third bullet
"""
-debugformats(b'lists', lists)
+debugformats('lists', lists)
options = b"""
There is support for simple option lists,
@@ -153,7 +155,7 @@
--foo bar baz
"""
-debugformats(b'options', options)
+debugformats('options', options)
fields = b"""
:a: First item.
@@ -166,7 +168,7 @@
:much too large: This key is big enough to get its own line.
"""
-debugformats(b'fields', fields)
+debugformats('fields', fields)
containers = b"""
Normal output.
@@ -184,14 +186,14 @@
Debug output.
"""
-debugformats(b'containers (normal)', containers)
-debugformats(b'containers (verbose)', containers, keep=['verbose'])
-debugformats(b'containers (debug)', containers, keep=['debug'])
-debugformats(b'containers (verbose debug)', containers,
- keep=['verbose', 'debug'])
+debugformats('containers (normal)', containers)
+debugformats('containers (verbose)', containers, keep=[b'verbose'])
+debugformats('containers (debug)', containers, keep=[b'debug'])
+debugformats('containers (verbose debug)', containers,
+ keep=[b'verbose', b'debug'])
roles = b"""Please see :hg:`add`."""
-debugformats(b'roles', roles)
+debugformats('roles', roles)
sections = b"""
@@ -207,7 +209,7 @@
Markup: ``foo`` and :hg:`help`
------------------------------
"""
-debugformats(b'sections', sections)
+debugformats('sections', sections)
admonitions = b"""
@@ -225,7 +227,7 @@
This is danger
"""
-debugformats(b'admonitions', admonitions)
+debugformats('admonitions', admonitions)
comments = b"""
Some text.
@@ -241,7 +243,7 @@
Empty comment above
"""
-debugformats(b'comments', comments)
+debugformats('comments', comments)
data = [[b'a', b'b', b'c'],
@@ -251,9 +253,9 @@
rst = minirst.maketable(data, 2, True)
table = b''.join(rst)
-print(table)
+print(table.decode('utf8'))
-debugformats(b'table', table)
+debugformats('table', table)
data = [[b's', b'long', b'line\ngoes on here'],
[b'', b'xy', b'tried to fix here\n by indenting']]
@@ -261,7 +263,6 @@
rst = minirst.maketable(data, 1, False)
table = b''.join(rst)
-print(table)
+print(table.decode('utf8'))
-debugformats(b'table+nl', table)
-
+debugformats('table+nl', table)
--- a/tests/test-mq-qclone-http.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-mq-qclone-http.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,3 @@
-#require killdaemons
-
hide outer repo
$ hg init
--- a/tests/test-mq-qimport.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-mq-qimport.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,3 @@
-#require killdaemons
-
$ cat > writelines.py <<EOF
> import sys
> path = sys.argv[1]
--- a/tests/test-narrow-expanddirstate.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-narrow-expanddirstate.t Wed Jun 06 13:31:24 2018 -0400
@@ -72,29 +72,31 @@
> for f in repo[b'.'].manifest().walk(added):
> repo.dirstate.normallookup(f)
>
- > def makeds(ui, repo):
- > def wrapds(orig, self):
- > ds = orig(self)
- > class expandingdirstate(ds.__class__):
- > @hgutil.propertycache
- > def _map(self):
- > ret = super(expandingdirstate, self)._map
- > with repo.wlock(), repo.lock(), repo.transaction(
- > b'expandnarrowspec'):
- > expandnarrowspec(ui, repo,
- > encoding.environ.get(b'DIRSTATEINCLUDES'))
- > return ret
- > ds.__class__ = expandingdirstate
- > return ds
- > return wrapds
+ > def wrapds(ui, repo, ds):
+ > class expandingdirstate(ds.__class__):
+ > @hgutil.propertycache
+ > def _map(self):
+ > ret = super(expandingdirstate, self)._map
+ > with repo.wlock(), repo.lock(), repo.transaction(
+ > b'expandnarrowspec'):
+ > expandnarrowspec(ui, repo,
+ > encoding.environ.get(b'DIRSTATEINCLUDES'))
+ > return ret
+ > ds.__class__ = expandingdirstate
+ > return ds
>
> def reposetup(ui, repo):
- > extensions.wrapfilecache(localrepo.localrepository, b'dirstate',
- > makeds(ui, repo))
- > def overridepatch(orig, *args, **kwargs):
+ > class expandingrepo(repo.__class__):
+ > def _makedirstate(self):
+ > dirstate = super(expandingrepo, self)._makedirstate()
+ > return wrapds(ui, repo, dirstate)
+ > repo.__class__ = expandingrepo
+ >
+ > def extsetup(unused_ui):
+ > def overridepatch(orig, ui, repo, *args, **kwargs):
> with repo.wlock():
> expandnarrowspec(ui, repo, encoding.environ.get(b'PATCHINCLUDES'))
- > return orig(*args, **kwargs)
+ > return orig(ui, repo, *args, **kwargs)
>
> extensions.wrapfunction(patch, b'patch', overridepatch)
> EOF
--- a/tests/test-notify.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-notify.t Wed Jun 06 13:31:24 2018 -0400
@@ -131,6 +131,9 @@
notify.diffstat
Set to True to include a diffstat before diff content. Default: True.
+ notify.showfunc
+ If set, override "diff.showfunc" for the diff content. Default: None.
+
notify.merge
If True, send notifications for merge changesets. Default: True.
@@ -647,3 +650,99 @@
To: baz@test.com, foo@bar
with template
+
+showfunc diff
+ $ cat <<EOF >> $HGRCPATH
+ > showfunc = True
+ > template =
+ > maxdiff = -1
+ > EOF
+ $ cd a
+ $ cat > f1 << EOF
+ > int main() {
+ > int a = 0;
+ > int b = 1;
+ > int c = 2;
+ > int d = 3;
+ > return a + b + c + d;
+ > }
+ > EOF
+ $ hg commit -Am addfunction
+ adding f1
+ $ hg --cwd ../b pull ../a
+ pulling from ../a
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ new changesets b86bc16ff894
+ MIME-Version: 1.0
+ Content-Type: text/plain; charset="us-ascii"
+ Content-Transfer-Encoding: 7bit
+ Date: * (glob)
+ Subject: addfunction
+ From: test@test.com
+ X-Hg-Notification: changeset b86bc16ff894
+ Message-Id: <hg.b86bc16ff894.*.*@*> (glob)
+ To: baz@test.com, foo@bar
+
+ changeset b86bc16ff894
+ diffs (11 lines):
+
+ diff -r 14721b538ae3 -r b86bc16ff894 f1
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/f1 Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,7 @@
+ +int main() {
+ + int a = 0;
+ + int b = 1;
+ + int c = 2;
+ + int d = 3;
+ + return a + b + c + d;
+ +}
+ (run 'hg update' to get a working copy)
+ $ cat > f1 << EOF
+ > int main() {
+ > int a = 0;
+ > int b = 1;
+ > int c = 2;
+ > int e = 3;
+ > return a + b + c + e;
+ > }
+ > EOF
+ $ hg commit -m changefunction
+ $ hg --cwd ../b --config notify.showfunc=True pull ../a
+ pulling from ../a
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ new changesets e81040e9838c
+ MIME-Version: 1.0
+ Content-Type: text/plain; charset="us-ascii"
+ Content-Transfer-Encoding: 7bit
+ Date: * (glob)
+ Subject: changefunction
+ From: test@test.com
+ X-Hg-Notification: changeset e81040e9838c
+ Message-Id: <hg.e81040e9838c.*.*@*> (glob)
+ To: baz@test.com, foo@bar
+
+ changeset e81040e9838c
+ diffs (12 lines):
+
+ diff -r b86bc16ff894 -r e81040e9838c f1
+ --- a/f1 Thu Jan 01 00:00:00 1970 +0000
+ +++ b/f1 Thu Jan 01 00:00:00 1970 +0000
+ @@ -2,6 +2,6 @@ int main() {
+ int a = 0;
+ int b = 1;
+ int c = 2;
+ - int d = 3;
+ - return a + b + c + d;
+ + int e = 3;
+ + return a + b + c + e;
+ }
+ (run 'hg update' to get a working copy)
--- a/tests/test-obshistory.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-obshistory.t Wed Jun 06 13:31:24 2018 -0400
@@ -55,9 +55,9 @@
(use --hidden to access hidden revisions)
[255]
$ hg update --hidden "desc(A0)"
- updating to a hidden changeset 471f378eab4c
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 471f378eab4c
(hidden revision '471f378eab4c' was rewritten as: 4ae3a4151de9)
- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
Test output with pruned commit
==============================
@@ -118,9 +118,9 @@
(use --hidden to access hidden revisions)
[255]
$ hg up --hidden -r 'desc(B0)'
- updating to a hidden changeset 0dec01379d3b
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 0dec01379d3b
(hidden revision '0dec01379d3b' is pruned)
- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
Test output with splitted commit
================================
@@ -195,9 +195,9 @@
(use --hidden to access hidden revisions)
[255]
$ hg update --hidden 'min(desc(A0))'
- updating to a hidden changeset 471597cad322
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 471597cad322
(hidden revision '471597cad322' was split as: 337fec4d2edc, f257fde29c7a)
- 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
Test output with lots of splitted commit
========================================
@@ -294,9 +294,9 @@
(use --hidden to access hidden revisions)
[255]
$ hg update --hidden 'min(desc(A0))'
- updating to a hidden changeset de7290d8b885
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset de7290d8b885
(hidden revision 'de7290d8b885' was split as: 337fec4d2edc, f257fde29c7a and 2 more)
- 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
Test output with folded commit
==============================
@@ -373,17 +373,17 @@
(use --hidden to access hidden revisions)
[255]
$ hg update --hidden 'desc(A0)'
- updating to a hidden changeset 471f378eab4c
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to hidden changeset 471f378eab4c
(hidden revision '471f378eab4c' was rewritten as: eb5a0daa2192)
- 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg update 0dec01379d3b
abort: hidden revision '0dec01379d3b' was rewritten as: eb5a0daa2192!
(use --hidden to access hidden revisions)
[255]
$ hg update --hidden 'desc(B0)'
- updating to a hidden changeset 0dec01379d3b
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 0dec01379d3b
(hidden revision '0dec01379d3b' was rewritten as: eb5a0daa2192)
- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
Test output with divergence
===========================
@@ -416,9 +416,9 @@
summary: ROOT
$ hg update --hidden 'desc(A0)'
- updating to a hidden changeset 471f378eab4c
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 471f378eab4c
(hidden revision '471f378eab4c' was rewritten as: fdf9bde5129a)
- 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg commit --amend -m "A2"
2 new content-divergent changesets
$ hg log --hidden -G
@@ -456,9 +456,9 @@
(use --hidden to access hidden revisions)
[255]
$ hg update --hidden 'desc(A0)'
- updating to a hidden changeset 471f378eab4c
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 471f378eab4c
(hidden revision '471f378eab4c' has diverged)
- 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
Test output with amended + folded commit
========================================
@@ -551,13 +551,13 @@
(use --hidden to access hidden revisions)
[255]
$ hg update --hidden 'desc(A0)'
- updating to a hidden changeset 471f378eab4c
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to hidden changeset 471f378eab4c
(hidden revision '471f378eab4c' was rewritten as: eb5a0daa2192)
- 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg update --hidden 0dec01379d3b
- updating to a hidden changeset 0dec01379d3b
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 0dec01379d3b
(hidden revision '0dec01379d3b' was rewritten as: eb5a0daa2192)
- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg update 0dec01379d3b
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg update --hidden 'desc(B0)'
--- a/tests/test-obsmarker-template.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-obsmarker-template.t Wed Jun 06 13:31:24 2018 -0400
@@ -75,9 +75,9 @@
Check templates
---------------
$ hg up 'desc(A0)' --hidden
- updating to a hidden changeset 471f378eab4c
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 471f378eab4c
(hidden revision '471f378eab4c' was rewritten as: d004c8f274b9)
- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
Predecessors template should show current revision as it is the working copy
$ hg tlog
@@ -148,9 +148,9 @@
summary: ROOT
$ hg up 'desc(A1)' --hidden
- updating to a hidden changeset a468dc9b3633
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset a468dc9b3633
(hidden revision 'a468dc9b3633' was rewritten as: d004c8f274b9)
- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
Predecessors template should show current revision as it is the working copy
$ hg tlog
@@ -417,9 +417,9 @@
---------------
$ hg up 'obsolete()' --hidden
- updating to a hidden changeset 471597cad322
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 471597cad322
(hidden revision '471597cad322' was split as: 337fec4d2edc, f257fde29c7a)
- 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
Predecessors template should show current revision as it is the working copy
$ hg tlog
@@ -627,9 +627,9 @@
---------------
$ hg up 'desc(A0)' --hidden
- updating to a hidden changeset 471f378eab4c
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to hidden changeset 471f378eab4c
(hidden revision '471f378eab4c' was rewritten as: eb5a0daa2192)
- 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
Predecessors template should show current revision as it is the working copy
$ hg tlog
@@ -653,9 +653,9 @@
o ea207398892e
$ hg up 'desc(B0)' --hidden
- updating to a hidden changeset 0dec01379d3b
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 0dec01379d3b
(hidden revision '0dec01379d3b' was rewritten as: eb5a0daa2192)
- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
Predecessors template should show both predecessors as they should be both
displayed
@@ -820,9 +820,9 @@
summary: ROOT
$ hg update --hidden 'desc(A0)'
- updating to a hidden changeset 471f378eab4c
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 471f378eab4c
(hidden revision '471f378eab4c' was rewritten as: fdf9bde5129a)
- 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg commit --amend -m "A2"
2 new content-divergent changesets
$ hg log --hidden -G
@@ -894,9 +894,9 @@
---------------
$ hg up 'desc(A0)' --hidden
- updating to a hidden changeset 471f378eab4c
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 471f378eab4c
(hidden revision '471f378eab4c' has diverged)
- 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
Predecessors template should show current revision as it is the working copy
$ hg tlog
@@ -1161,9 +1161,9 @@
---------------
$ hg up 'desc(A0)' --hidden
- updating to a hidden changeset 471f378eab4c
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to hidden changeset 471f378eab4c
(hidden revision '471f378eab4c' was rewritten as: eb5a0daa2192)
- 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
Predecessors template should show current revision as it is the working copy
$ hg tlog
@@ -1187,9 +1187,9 @@
o ea207398892e
$ hg up 'desc(B0)' --hidden
- updating to a hidden changeset 0dec01379d3b
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 0dec01379d3b
(hidden revision '0dec01379d3b' was rewritten as: eb5a0daa2192)
- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
Predecessors template should both predecessors as they are visible
$ hg tlog
@@ -1219,9 +1219,9 @@
o ea207398892e
$ hg up 'desc(B1)' --hidden
- updating to a hidden changeset b7ea6d14e664
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset b7ea6d14e664
(hidden revision 'b7ea6d14e664' was rewritten as: eb5a0daa2192)
- 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
Predecessors template should both predecessors as they are visible
$ hg tlog
@@ -1623,9 +1623,9 @@
$ hg up -r "desc(B0)" --hidden
- updating to a hidden changeset 0dec01379d3b
+ 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to hidden changeset 0dec01379d3b
(hidden revision '0dec01379d3b' is pruned)
- 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg tlog
o f897c6137566
| Predecessors: 2:0dec01379d3b
@@ -2087,9 +2087,9 @@
o ea207398892e
$ hg up --hidden 4
- updating to a hidden changeset 9bd10a0775e4
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 9bd10a0775e4
(hidden revision '9bd10a0775e4' has diverged)
- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg rebase -r 7 -d 8 --config extensions.rebase=
rebasing 7:ba2ed02b0c9a "Add A,B,C"
$ hg tlog
@@ -2333,9 +2333,9 @@
obsoleted 1 changesets
$ hg up -r "desc(A0)" --hidden
- updating to a hidden changeset 471f378eab4c
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 471f378eab4c
(hidden revision '471f378eab4c' is pruned)
- 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg commit --amend -m "A2"
$ hg debugobsolete --record-parent `getid "."`
obsoleted 1 changesets
@@ -2344,9 +2344,9 @@
------------
$ hg up "desc(A0)" --hidden
- updating to a hidden changeset 471f378eab4c
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 471f378eab4c
(hidden revision '471f378eab4c' is pruned)
- 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg tlog
@ 471f378eab4c
|
@@ -2499,9 +2499,9 @@
---------------
$ hg up 'desc("A0")' --hidden
- updating to a hidden changeset 471597cad322
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 471597cad322
(hidden revision '471597cad322' is pruned)
- 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
# todo: the obsfate output is not ideal
$ hg fatelog
@@ -2512,9 +2512,9 @@
o ea207398892e
$ hg up -r 'desc("A2")' --hidden
- updating to a hidden changeset 0d0ef4bdf70e
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset 0d0ef4bdf70e
(hidden revision '0d0ef4bdf70e' is pruned)
- 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg fatelog --hidden
@ 0d0ef4bdf70e
--- a/tests/test-obsolete-checkheads.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-obsolete-checkheads.t Wed Jun 06 13:31:24 2018 -0400
@@ -80,6 +80,7 @@
pulling from $TESTTMP/remote
searching for changes
no changes found
+ 1 changesets became public
$ hg log -G --hidden
@ 71e3228bffe1 (draft) add new
|
--- a/tests/test-obsolete.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-obsolete.t Wed Jun 06 13:31:24 2018 -0400
@@ -62,7 +62,7 @@
$ hg tip
-1:000000000000 (public) [tip ]
$ hg up --hidden tip --quiet
- updating to a hidden changeset 97b7c2d76b18
+ updated to hidden changeset 97b7c2d76b18
(hidden revision '97b7c2d76b18' is pruned)
Killing a single changeset with itself should fail
@@ -931,7 +931,7 @@
$ echo "evolution.exchange=True" >> $HGRCPATH
$ echo "evolution.createmarkers=True" >> $HGRCPATH
- $ rm hg.pid access.log errors.log
+ $ rm access.log errors.log
#endif
Several troubles on the same changeset (create an unstable and bumped changeset)
@@ -1318,16 +1318,18 @@
$ cat >$TESTTMP/test_extension.py << EOF
> from __future__ import absolute_import
> from mercurial.i18n import _
- > from mercurial import cmdutil, registrar
+ > from mercurial import cmdutil, pycompat, registrar
+ > from mercurial.utils import stringutil
>
> cmdtable = {}
> command = registrar.command(cmdtable)
> @command(b"amendtransient",[], _(b'hg amendtransient [rev]'))
> def amend(ui, repo, *pats, **opts):
- > opts['message'] = 'Test'
- > opts['logfile'] = None
- > cmdutil.amend(ui, repo, repo['.'], {}, pats, opts)
- > ui.write(b'%s\n' % repo.changelog.headrevs())
+ > opts = pycompat.byteskwargs(opts)
+ > opts[b'message'] = b'Test'
+ > opts[b'logfile'] = None
+ > cmdutil.amend(ui, repo, repo[b'.'], {}, pats, opts)
+ > ui.write(b'%s\n' % stringutil.pprint(repo.changelog.headrevs()))
> EOF
$ cat >> $HGRCPATH << EOF
> [extensions]
@@ -1365,7 +1367,7 @@
> hidden = repoview.filterrevs(repo, b'visible')
> if sorted(hidden1) != sorted(hidden):
> print("cache inconsistency")
- > bkmstoreinst._repo.currenttransaction().addpostclose('test_extension', trhook)
+ > bkmstoreinst._repo.currenttransaction().addpostclose(b'test_extension', trhook)
> orig(bkmstoreinst, *args, **kwargs)
> def extsetup(ui):
> extensions.wrapfunction(bookmarks.bmstore, '_recordchange',
--- a/tests/test-pager.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-pager.t Wed Jun 06 13:31:24 2018 -0400
@@ -260,9 +260,9 @@
> from mercurial import commands, registrar
> cmdtable = {}
> command = registrar.command(cmdtable)
- > @command(b'fortytwo', [], 'fortytwo', norepo=True)
+ > @command(b'fortytwo', [], b'fortytwo', norepo=True)
> def fortytwo(ui, *opts):
- > ui.write('42\n')
+ > ui.write(b'42\n')
> return 42
> EOF
@@ -377,8 +377,8 @@
$ cat > $TESTTMP/pushbufferpager.py <<EOF
> def uisetup(ui):
> ui.pushbuffer()
- > ui.pager('mycmd')
- > ui.write('content\n')
+ > ui.pager(b'mycmd')
+ > ui.write(b'content\n')
> ui.write(ui.popbuffer())
> EOF
--- a/tests/test-parse-date.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-parse-date.t Wed Jun 06 13:31:24 2018 -0400
@@ -243,8 +243,8 @@
>>> today = datetime.date.today().strftime("%b %d")
>>> yesterday = (datetime.date.today() - datetime.timedelta(days=1)).strftime("%b %d")
>>> dates = open('dates', 'w')
- >>> dates.write(today + '\n')
- >>> dates.write(yesterday + '\n')
+ >>> dates.write(today + '\n') and None
+ >>> dates.write(yesterday + '\n') and None
>>> dates.close()
$ hg ci -d "`sed -n '1p' dates`" -m "today is a good day to code"
$ hg log -d today --template '{desc}\n'
--- a/tests/test-parseindex.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-parseindex.t Wed Jun 06 13:31:24 2018 -0400
@@ -26,6 +26,7 @@
summary: change foo
$ cat >> test.py << EOF
+ > from __future__ import print_function
> from mercurial import changelog, vfs
> from mercurial.node import *
>
@@ -56,9 +57,9 @@
> return wrapper
>
> cl = changelog.changelog(opener('.hg/store'))
- > print len(cl), 'revisions:'
+ > print(len(cl), 'revisions:')
> for r in cl:
- > print short(cl.node(r))
+ > print(short(cl.node(r)))
> EOF
$ $PYTHON test.py
2 revisions:
@@ -74,33 +75,34 @@
$ cd a
$ $PYTHON <<EOF
+ > from __future__ import print_function
> from mercurial import changelog, vfs
> cl = changelog.changelog(vfs.vfs('.hg/store'))
- > print 'good heads:'
+ > print('good heads:')
> for head in [0, len(cl) - 1, -1]:
- > print'%s: %r' % (head, cl.reachableroots(0, [head], [0]))
- > print 'bad heads:'
+ > print('%s: %r' % (head, cl.reachableroots(0, [head], [0])))
+ > print('bad heads:')
> for head in [len(cl), 10000, -2, -10000, None]:
- > print '%s:' % head,
+ > print('%s:' % head, end=' ')
> try:
> cl.reachableroots(0, [head], [0])
- > print 'uncaught buffer overflow?'
+ > print('uncaught buffer overflow?')
> except (IndexError, TypeError) as inst:
- > print inst
- > print 'good roots:'
+ > print(inst)
+ > print('good roots:')
> for root in [0, len(cl) - 1, -1]:
- > print '%s: %r' % (root, cl.reachableroots(root, [len(cl) - 1], [root]))
- > print 'out-of-range roots are ignored:'
+ > print('%s: %r' % (root, cl.reachableroots(root, [len(cl) - 1], [root])))
+ > print('out-of-range roots are ignored:')
> for root in [len(cl), 10000, -2, -10000]:
- > print '%s: %r' % (root, cl.reachableroots(root, [len(cl) - 1], [root]))
- > print 'bad roots:'
+ > print('%s: %r' % (root, cl.reachableroots(root, [len(cl) - 1], [root])))
+ > print('bad roots:')
> for root in [None]:
- > print '%s:' % root,
+ > print('%s:' % root, end=' ')
> try:
> cl.reachableroots(root, [len(cl) - 1], [root])
- > print 'uncaught error?'
+ > print('uncaught error?')
> except TypeError as inst:
- > print inst
+ > print(inst)
> EOF
good heads:
0: [0]
@@ -137,10 +139,10 @@
$ $PYTHON <<EOF
> data = open("limit/.hg/store/00changelog.i", "rb").read()
- > for n, p in [('limit', '\0\0\0\x02'), ('segv', '\0\x01\0\0')]:
+ > for n, p in [(b'limit', b'\0\0\0\x02'), (b'segv', b'\0\x01\0\0')]:
> # corrupt p1 at rev0 and p2 at rev1
> d = data[:24] + p + data[28:127 + 28] + p + data[127 + 32:]
- > open(n + "/.hg/store/00changelog.i", "wb").write(d)
+ > open(n + b"/.hg/store/00changelog.i", "wb").write(d)
> EOF
$ hg -R limit debugindex -f1 -c
@@ -164,6 +166,7 @@
1 2 1 -1 base 66 65 66 1.01538 66 0 0.00000
$ cat <<EOF > test.py
+ > from __future__ import print_function
> import sys
> from mercurial import changelog, vfs
> cl = changelog.changelog(vfs.vfs(sys.argv[1]))
@@ -177,12 +180,12 @@
> ('find_deepest', lambda: cl.ancestor(n0, n1)),
> ]
> for l, f in ops:
- > print l + ':',
+ > print(l + ':', end=' ')
> try:
> f()
- > print 'uncaught buffer overflow?'
- > except ValueError, inst:
- > print inst
+ > print('uncaught buffer overflow?')
+ > except ValueError as inst:
+ > print(inst)
> EOF
$ $PYTHON test.py limit/.hg/store
--- a/tests/test-parseindex2.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-parseindex2.py Wed Jun 06 13:31:24 2018 -0400
@@ -15,6 +15,7 @@
)
from mercurial import (
policy,
+ pycompat,
)
parsers = policy.importmod(r'parsers')
@@ -24,7 +25,7 @@
return int(q & 0xFFFF)
def offset_type(offset, type):
- return long(long(offset) << 16 | type)
+ return int(int(offset) << 16 | type)
indexformatng = ">Qiiiiii20s12x"
@@ -65,46 +66,50 @@
return index, cache
-data_inlined = '\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x01\x8c' \
- '\x00\x00\x04\x07\x00\x00\x00\x00\x00\x00\x15\x15\xff\xff\xff' \
- '\xff\xff\xff\xff\xff\xebG\x97\xb7\x1fB\x04\xcf\x13V\x81\tw\x1b' \
- 'w\xdduR\xda\xc6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' \
- 'x\x9c\x9d\x93?O\xc30\x10\xc5\xf7|\x8a\xdb\x9a\xa8m\x06\xd8*\x95' \
- '\x81B\xa1\xa2\xa2R\xcb\x86Pd\x9a\x0b5$vd_\x04\xfd\xf6\x9c\xff@' \
- '\x11!\x0b\xd9\xec\xf7\xbbw\xe7gG6\xad6\x04\xdaN\xc0\x92\xa0$)' \
- '\xb1\x82\xa2\xd1%\x16\xa4\x8b7\xa9\xca\xd4-\xb2Y\x02\xfc\xc9' \
- '\xcaS\xf9\xaeX\xed\xb6\xd77Q\x02\x83\xd4\x19\xf5--Y\xea\xe1W' \
- '\xab\xed\x10\xceR\x0f_\xdf\xdf\r\xe1,\xf5\xf0\xcb\xf5 \xceR\x0f' \
- '_\xdc\x0e\x0e\xc3R\x0f_\xae\x96\x9b!\x9e\xa5\x1e\xbf\xdb,\x06' \
- '\xc7q\x9a/\x88\x82\xc3B\xea\xb5\xb4TJ\x93\xb6\x82\x0e\xe16\xe6' \
- 'KQ\xdb\xaf\xecG\xa3\xd1 \x01\xd3\x0b_^\xe8\xaa\xa0\xae\xad\xd1' \
- '&\xbef\x1bz\x08\xb0|\xc9Xz\x06\xf6Z\x91\x90J\xaa\x17\x90\xaa' \
- '\xd2\xa6\x11$5C\xcf\xba#\xa0\x03\x02*2\x92-\xfc\xb1\x94\xdf\xe2' \
- '\xae\xb8\'m\x8ey0^\x85\xd3\x82\xb4\xf0`:\x9c\x00\x8a\xfd\x01' \
- '\xb0\xc6\x86\x8b\xdd\xae\x80\xf3\xa9\x9fd\x16\n\x00R%\x1a\x06' \
- '\xe9\xd8b\x98\x1d\xf4\xf3+\x9bf\x01\xd8p\x1b\xf3.\xed\x9f^g\xc3' \
- '^\xd9W81T\xdb\xd5\x04sx|\xf2\xeb\xd6`%?x\xed"\x831\xbf\xf3\xdc' \
- 'b\xeb%gaY\xe1\xad\x9f\xb9f\'1w\xa9\xa5a\x83s\x82J\xb98\xbc4\x8b' \
- '\x83\x00\x9f$z\xb8#\xa5\xb1\xdf\x98\xd9\xec\x1b\x89O\xe3Ts\x9a4' \
- '\x17m\x8b\xfc\x8f\xa5\x95\x9a\xfc\xfa\xed,\xe5|\xa1\xfe\x15\xb9' \
- '\xbc\xb2\x93\x1f\xf2\x95\xff\xdf,\x1a\xc5\xe7\x17*\x93Oz:>\x0e'
+data_inlined = (
+ b'\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x01\x8c'
+ b'\x00\x00\x04\x07\x00\x00\x00\x00\x00\x00\x15\x15\xff\xff\xff'
+ b'\xff\xff\xff\xff\xff\xebG\x97\xb7\x1fB\x04\xcf\x13V\x81\tw\x1b'
+ b'w\xdduR\xda\xc6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+ b'x\x9c\x9d\x93?O\xc30\x10\xc5\xf7|\x8a\xdb\x9a\xa8m\x06\xd8*\x95'
+ b'\x81B\xa1\xa2\xa2R\xcb\x86Pd\x9a\x0b5$vd_\x04\xfd\xf6\x9c\xff@'
+ b'\x11!\x0b\xd9\xec\xf7\xbbw\xe7gG6\xad6\x04\xdaN\xc0\x92\xa0$)'
+ b'\xb1\x82\xa2\xd1%\x16\xa4\x8b7\xa9\xca\xd4-\xb2Y\x02\xfc\xc9'
+ b'\xcaS\xf9\xaeX\xed\xb6\xd77Q\x02\x83\xd4\x19\xf5--Y\xea\xe1W'
+ b'\xab\xed\x10\xceR\x0f_\xdf\xdf\r\xe1,\xf5\xf0\xcb\xf5 \xceR\x0f'
+ b'_\xdc\x0e\x0e\xc3R\x0f_\xae\x96\x9b!\x9e\xa5\x1e\xbf\xdb,\x06'
+ b'\xc7q\x9a/\x88\x82\xc3B\xea\xb5\xb4TJ\x93\xb6\x82\x0e\xe16\xe6'
+ b'KQ\xdb\xaf\xecG\xa3\xd1 \x01\xd3\x0b_^\xe8\xaa\xa0\xae\xad\xd1'
+ b'&\xbef\x1bz\x08\xb0|\xc9Xz\x06\xf6Z\x91\x90J\xaa\x17\x90\xaa'
+ b'\xd2\xa6\x11$5C\xcf\xba#\xa0\x03\x02*2\x92-\xfc\xb1\x94\xdf\xe2'
+ b'\xae\xb8\'m\x8ey0^\x85\xd3\x82\xb4\xf0`:\x9c\x00\x8a\xfd\x01'
+ b'\xb0\xc6\x86\x8b\xdd\xae\x80\xf3\xa9\x9fd\x16\n\x00R%\x1a\x06'
+ b'\xe9\xd8b\x98\x1d\xf4\xf3+\x9bf\x01\xd8p\x1b\xf3.\xed\x9f^g\xc3'
+ b'^\xd9W81T\xdb\xd5\x04sx|\xf2\xeb\xd6`%?x\xed"\x831\xbf\xf3\xdc'
+ b'b\xeb%gaY\xe1\xad\x9f\xb9f\'1w\xa9\xa5a\x83s\x82J\xb98\xbc4\x8b'
+ b'\x83\x00\x9f$z\xb8#\xa5\xb1\xdf\x98\xd9\xec\x1b\x89O\xe3Ts\x9a4'
+ b'\x17m\x8b\xfc\x8f\xa5\x95\x9a\xfc\xfa\xed,\xe5|\xa1\xfe\x15\xb9'
+ b'\xbc\xb2\x93\x1f\xf2\x95\xff\xdf,\x1a\xc5\xe7\x17*\x93Oz:>\x0e'
+ )
-data_non_inlined = '\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01D\x19' \
- '\x00\x07e\x12\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff' \
- '\xff\xff\xff\xff\xd1\xf4\xbb\xb0\xbe\xfc\x13\xbd\x8c\xd3\x9d' \
- '\x0f\xcd\xd9;\x8c\x07\x8cJ/\x00\x00\x00\x00\x00\x00\x00\x00\x00' \
- '\x00\x00\x00\x00\x00\x00\x01D\x19\x00\x00\x00\x00\x00\xdf\x00' \
- '\x00\x01q\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\xff' \
- '\xff\xff\xff\xc1\x12\xb9\x04\x96\xa4Z1t\x91\xdfsJ\x90\xf0\x9bh' \
- '\x07l&\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' \
- '\x00\x01D\xf8\x00\x00\x00\x00\x01\x1b\x00\x00\x01\xb8\x00\x00' \
- '\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\x02\n' \
- '\x0e\xc6&\xa1\x92\xae6\x0b\x02i\xfe-\xe5\xbao\x05\xd1\xe7\x00' \
- '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01F' \
- '\x13\x00\x00\x00\x00\x01\xec\x00\x00\x03\x06\x00\x00\x00\x01' \
- '\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x12\xcb\xeby1' \
- '\xb6\r\x98B\xcb\x07\xbd`\x8f\x92\xd9\xc4\x84\xbdK\x00\x00\x00' \
- '\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+data_non_inlined = (
+ b'\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01D\x19'
+ b'\x00\x07e\x12\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff'
+ b'\xff\xff\xff\xff\xd1\xf4\xbb\xb0\xbe\xfc\x13\xbd\x8c\xd3\x9d'
+ b'\x0f\xcd\xd9;\x8c\x07\x8cJ/\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+ b'\x00\x00\x00\x00\x00\x00\x01D\x19\x00\x00\x00\x00\x00\xdf\x00'
+ b'\x00\x01q\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\xff'
+ b'\xff\xff\xff\xc1\x12\xb9\x04\x96\xa4Z1t\x91\xdfsJ\x90\xf0\x9bh'
+ b'\x07l&\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+ b'\x00\x01D\xf8\x00\x00\x00\x00\x01\x1b\x00\x00\x01\xb8\x00\x00'
+ b'\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\x02\n'
+ b'\x0e\xc6&\xa1\x92\xae6\x0b\x02i\xfe-\xe5\xbao\x05\xd1\xe7\x00'
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01F'
+ b'\x13\x00\x00\x00\x00\x01\xec\x00\x00\x03\x06\x00\x00\x00\x01'
+ b'\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x12\xcb\xeby1'
+ b'\xb6\r\x98B\xcb\x07\xbd`\x8f\x92\xd9\xc4\x84\xbdK\x00\x00\x00'
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+ )
def parse_index2(data, inline):
index, chunkcache = parsers.parse_index2(data, inline)
@@ -145,7 +150,7 @@
def testversionfail(testnumber, hexversion):
stdout, stderr = importparsers(hexversion)
# We include versionerrortext to distinguish from other ImportErrors.
- errtext = "ImportError: %s" % parsers.versionerrortext
+ errtext = b"ImportError: %s" % pycompat.sysbytes(parsers.versionerrortext)
if errtext not in stdout:
printhexfail(testnumber, hexversion, stdout,
expected="stdout to contain %r" % errtext)
--- a/tests/test-patch.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-patch.t Wed Jun 06 13:31:24 2018 -0400
@@ -92,7 +92,12 @@
Error exit (issue4746)
- $ hg import ../c/p --config ui.patch='sh -c "exit 1"'
+ $ cat >> exit1.py <<EOF
+ > import sys
+ > sys.exit(1)
+ > EOF
+
+ $ hg import ../c/p --config ui.patch="\"$PYTHON\" \"`pwd`/exit1.py\""
applying ../c/p
abort: patch command failed: exited with status 1
[255]
--- a/tests/test-pathencode.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-pathencode.py Wed Jun 06 13:31:24 2018 -0400
@@ -16,6 +16,7 @@
import sys
import time
from mercurial import (
+ pycompat,
store,
)
@@ -24,15 +25,15 @@
except NameError:
xrange = range
-validchars = set(map(chr, range(0, 256)))
+validchars = set(map(pycompat.bytechr, range(0, 256)))
alphanum = range(ord('A'), ord('Z'))
-for c in '\0/':
+for c in (b'\0', b'/'):
validchars.remove(c)
-winreserved = ('aux con prn nul'.split() +
- ['com%d' % i for i in xrange(1, 10)] +
- ['lpt%d' % i for i in xrange(1, 10)])
+winreserved = (b'aux con prn nul'.split() +
+ [b'com%d' % i for i in xrange(1, 10)] +
+ [b'lpt%d' % i for i in xrange(1, 10)])
def casecombinations(names):
'''Build all case-diddled combinations of names.'''
@@ -44,7 +45,7 @@
for c in itertools.combinations(xrange(len(r)), i):
d = r
for j in c:
- d = ''.join((d[:j], d[j].upper(), d[j + 1:]))
+ d = b''.join((d[:j], d[j:j + 1].upper(), d[j + 1:]))
combos.add(d)
return sorted(combos)
@@ -78,19 +79,19 @@
# looking at filelog names from a real-world, very large repo.
probtable = (
- ('t', 9.828), ('e', 9.042), ('s', 8.011), ('a', 6.801), ('i', 6.618),
- ('g', 5.053), ('r', 5.030), ('o', 4.887), ('p', 4.363), ('n', 4.258),
- ('l', 3.830), ('h', 3.693), ('_', 3.659), ('.', 3.377), ('m', 3.194),
- ('u', 2.364), ('d', 2.296), ('c', 2.163), ('b', 1.739), ('f', 1.625),
- ('6', 0.666), ('j', 0.610), ('y', 0.554), ('x', 0.487), ('w', 0.477),
- ('k', 0.476), ('v', 0.473), ('3', 0.336), ('1', 0.335), ('2', 0.326),
- ('4', 0.310), ('5', 0.305), ('9', 0.302), ('8', 0.300), ('7', 0.299),
- ('q', 0.298), ('0', 0.250), ('z', 0.223), ('-', 0.118), ('C', 0.095),
- ('T', 0.087), ('F', 0.085), ('B', 0.077), ('S', 0.076), ('P', 0.076),
- ('L', 0.059), ('A', 0.058), ('N', 0.051), ('D', 0.049), ('M', 0.046),
- ('E', 0.039), ('I', 0.035), ('R', 0.035), ('G', 0.028), ('U', 0.026),
- ('W', 0.025), ('O', 0.017), ('V', 0.015), ('H', 0.013), ('Q', 0.011),
- ('J', 0.007), ('K', 0.005), ('+', 0.004), ('X', 0.003), ('Y', 0.001),
+ (b't', 9.828), (b'e', 9.042), (b's', 8.011), (b'a', 6.801), (b'i', 6.618),
+ (b'g', 5.053), (b'r', 5.030), (b'o', 4.887), (b'p', 4.363), (b'n', 4.258),
+ (b'l', 3.830), (b'h', 3.693), (b'_', 3.659), (b'.', 3.377), (b'm', 3.194),
+ (b'u', 2.364), (b'd', 2.296), (b'c', 2.163), (b'b', 1.739), (b'f', 1.625),
+ (b'6', 0.666), (b'j', 0.610), (b'y', 0.554), (b'x', 0.487), (b'w', 0.477),
+ (b'k', 0.476), (b'v', 0.473), (b'3', 0.336), (b'1', 0.335), (b'2', 0.326),
+ (b'4', 0.310), (b'5', 0.305), (b'9', 0.302), (b'8', 0.300), (b'7', 0.299),
+ (b'q', 0.298), (b'0', 0.250), (b'z', 0.223), (b'-', 0.118), (b'C', 0.095),
+ (b'T', 0.087), (b'F', 0.085), (b'B', 0.077), (b'S', 0.076), (b'P', 0.076),
+ (b'L', 0.059), (b'A', 0.058), (b'N', 0.051), (b'D', 0.049), (b'M', 0.046),
+ (b'E', 0.039), (b'I', 0.035), (b'R', 0.035), (b'G', 0.028), (b'U', 0.026),
+ (b'W', 0.025), (b'O', 0.017), (b'V', 0.015), (b'H', 0.013), (b'Q', 0.011),
+ (b'J', 0.007), (b'K', 0.005), (b'+', 0.004), (b'X', 0.003), (b'Y', 0.001),
)
for c, _ in probtable:
@@ -121,12 +122,12 @@
# Special suffixes.
-internalsuffixcombos = casecombinations('.hg .i .d'.split())
+internalsuffixcombos = casecombinations(b'.hg .i .d'.split())
# The last component of a path, before a slash or at the end of a name.
lasttable = resttable + (
- (lambda rng: '', 95),
+ (lambda rng: b'', 95),
(lambda rng: rng.choice(internalsuffixcombos), 5),
)
@@ -142,13 +143,13 @@
l += len(p)
ps.append(p)
ps.append(pickfrom(rng, lasttable)(rng))
- return ''.join(ps)
+ return b''.join(ps)
def makepath(rng, j, k):
'''Construct a complete pathname.'''
- return ('data/' + '/'.join(makepart(rng, k) for _ in xrange(j)) +
- rng.choice(['.d', '.i']))
+ return (b'data/' + b'/'.join(makepart(rng, k) for _ in xrange(j)) +
+ rng.choice([b'.d', b'.i']))
def genpath(rng, count):
'''Generate random pathnames with gradually increasing lengths.'''
--- a/tests/test-phases-exchange.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-phases-exchange.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,3 @@
-#require killdaemons
-
$ cat >> $HGRCPATH << EOF
> [extensions]
> phasereport=$TESTDIR/testlib/ext-phase-report.py
@@ -169,6 +167,7 @@
pulling from ../alpha
searching for changes
no changes found
+ 1 changesets became public
test-debug-phase: move rev 2: 1 -> 0
$ hgph
o 4 public a-D - b555f63b6063
@@ -285,6 +284,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files
new changesets b555f63b6063
+ 3 changesets became public
test-debug-phase: move rev 0: 1 -> 0
test-debug-phase: move rev 1: 1 -> 0
test-debug-phase: move rev 2: 1 -> 0
@@ -333,6 +333,7 @@
adding file changes
added 2 changesets with 2 changes to 2 files
new changesets d6bcb4f74035:145e75495359
+ 4 changesets became public
test-debug-phase: move rev 0: 1 -> 0
test-debug-phase: move rev 1: 1 -> 0
test-debug-phase: move rev 3: 1 -> 0
@@ -406,6 +407,7 @@
pulling from ../alpha
searching for changes
no changes found
+ 3 changesets became public
test-debug-phase: move rev 3: 1 -> 0
test-debug-phase: move rev 5: 1 -> 0
test-debug-phase: move rev 6: 1 -> 0
--- a/tests/test-profile.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-profile.t Wed Jun 06 13:31:24 2018 -0400
@@ -69,7 +69,7 @@
> from mercurial import registrar, commands
> cmdtable = {}
> command = registrar.command(cmdtable)
- > @command(b'sleep', [], 'hg sleep')
+ > @command(b'sleep', [], b'hg sleep')
> def sleep(ui, *args, **kwargs):
> time.sleep(0.1)
> EOF
@@ -123,13 +123,13 @@
> yield
> print('fooprof: end profile')
> def extsetup(ui):
- > ui.write('fooprof: loaded\n')
+ > ui.write(b'fooprof: loaded\n')
> EOF
$ cat > otherextension.py <<EOF
> from __future__ import absolute_import
> def extsetup(ui):
- > ui.write('otherextension: loaded\n')
+ > ui.write(b'otherextension: loaded\n')
> EOF
$ hg init b
--- a/tests/test-progress.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-progress.t Wed Jun 06 13:31:24 2018 -0400
@@ -16,10 +16,10 @@
> time.time = incrementingtime()
>
> @command(b'loop',
- > [('', 'total', '', 'override for total'),
- > ('', 'nested', False, 'show nested results'),
- > ('', 'parallel', False, 'show parallel sets of results')],
- > 'hg loop LOOPS',
+ > [(b'', b'total', b'', b'override for total'),
+ > (b'', b'nested', False, b'show nested results'),
+ > (b'', b'parallel', False, b'show parallel sets of results')],
+ > b'hg loop LOOPS',
> norepo=True)
> def loop(ui, loops, **opts):
> loops = int(loops)
--- a/tests/test-pull-branch.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-pull-branch.t Wed Jun 06 13:31:24 2018 -0400
@@ -170,6 +170,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
new changesets 7d8ffa4c0b22
+ 13 changesets became public
(run 'hg heads' to see heads)
Make changes on default and branchC on tt
@@ -183,6 +184,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
new changesets 2b94b54b6b5f
+ 1 changesets became public
(run 'hg heads' to see heads)
$ hg up -C default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -219,6 +221,7 @@
adding file changes
added 2 changesets with 2 changes to 2 files (+2 heads)
new changesets eed40c14b407:e634733b0309
+ 1 changesets became public
(run 'hg heads .' to see heads, 'hg merge' to merge)
$ cd ..
--- a/tests/test-pull-bundle.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-pull-bundle.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,3 +1,5 @@
+#require no-chg
+
$ hg init repo
$ cd repo
$ echo foo > foo
--- a/tests/test-pull-http.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-pull-http.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,3 @@
-#require killdaemons
-
#if no-windows
For debugging: this is a pretty simple test that is a good candidate
for tracking down network-related bugs. Sometimes a command in this
--- a/tests/test-pull-update.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-pull-update.t Wed Jun 06 13:31:24 2018 -0400
@@ -27,6 +27,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
new changesets 107cefe13e42
+ 1 changesets became public
abort: uncommitted changes
[255]
$ hg --config extensions.strip= strip --no-backup tip
@@ -58,6 +59,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
new changesets 800c91d5bfc1
+ 1 changesets became public
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
updated to "107cefe13e42: m"
1 other heads for branch "default"
@@ -80,6 +82,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files (-1 heads)
new changesets 483b76ad4309
+ 1 changesets became public
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
Similarity between "hg update" and "hg pull -u" in handling bookmark
@@ -108,6 +111,7 @@
added 1 changesets with 1 changes to 1 files
adding remote bookmark active-after-pull
new changesets f815b3da6163
+ 1 changesets became public
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
(activating bookmark active-after-pull)
@@ -137,6 +141,7 @@
added 1 changesets with 1 changes to 1 files
adding remote bookmark active-after-pull
new changesets f815b3da6163
+ 1 changesets became public
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
(activating bookmark active-after-pull)
@@ -175,6 +180,7 @@
adding file changes
added 2 changesets with 1 changes to 1 files
new changesets f815b3da6163:b5e4babfaaa7
+ 1 changesets became public
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
(leaving bookmark active-before-pull)
@@ -202,6 +208,7 @@
adding file changes
added 2 changesets with 1 changes to 1 files
new changesets f815b3da6163:b5e4babfaaa7
+ 1 changesets became public
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
(leaving bookmark active-before-pull)
@@ -229,6 +236,7 @@
adding file changes
added 2 changesets with 1 changes to 1 files
new changesets f815b3da6163:b5e4babfaaa7
+ 1 changesets became public
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
(leaving bookmark active-before-pull)
--- a/tests/test-pull.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-pull.t Wed Jun 06 13:31:24 2018 -0400
@@ -109,12 +109,12 @@
It's tricky to make file:// URLs working on every platform with
regular shell commands.
- $ URL=`$PYTHON -c "import os; print 'file://foobar' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test'"`
+ $ URL=`$PYTHON -c "from __future__ import print_function; import os; print('file://foobar' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test')"`
$ hg pull -q "$URL"
abort: file:// URLs can only refer to localhost
[255]
- $ URL=`$PYTHON -c "import os; print 'file://localhost' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test'"`
+ $ URL=`$PYTHON -c "from __future__ import print_function; import os; print('file://localhost' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test')"`
$ hg pull -q "$URL"
SEC: check for unsafe ssh url
--- a/tests/test-push-http.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-push-http.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,4 +1,4 @@
-#require killdaemons
+#require no-chg
#testcases bundle1 bundle2
--- a/tests/test-py3-commands.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-py3-commands.t Wed Jun 06 13:31:24 2018 -0400
@@ -198,28 +198,28 @@
$ $PYTHON3 $HGBIN log -Tjson
[
{
- "rev": 1,
- "node": "e1e9167203d450ca2f558af628955b5f5afd4489",
+ "bookmarks": [],
"branch": "default",
- "phase": "draft",
- "user": "test",
"date": [0, 0],
"desc": "message",
- "bookmarks": [],
+ "node": "e1e9167203d450ca2f558af628955b5f5afd4489",
+ "parents": ["71c96e924262969ff0d8d3d695b0f75412ccc3d8"],
+ "phase": "draft",
+ "rev": 1,
"tags": ["tip"],
- "parents": ["71c96e924262969ff0d8d3d695b0f75412ccc3d8"]
+ "user": "test"
},
{
- "rev": 0,
- "node": "71c96e924262969ff0d8d3d695b0f75412ccc3d8",
+ "bookmarks": [],
"branch": "default",
- "phase": "draft",
- "user": "test",
"date": [0, 0],
"desc": "commit performed in Python 3",
- "bookmarks": [],
+ "node": "71c96e924262969ff0d8d3d695b0f75412ccc3d8",
+ "parents": ["0000000000000000000000000000000000000000"],
+ "phase": "draft",
+ "rev": 0,
"tags": [],
- "parents": ["0000000000000000000000000000000000000000"]
+ "user": "test"
}
]
--- a/tests/test-rebase-obsolete.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-rebase-obsolete.t Wed Jun 06 13:31:24 2018 -0400
@@ -281,7 +281,7 @@
even though it is hidden (until we're moved there).
$ hg --hidden up -qr 'first(hidden())'
- updating to a hidden changeset 42ccdea3bb16
+ updated to hidden changeset 42ccdea3bb16
(hidden revision '42ccdea3bb16' is pruned)
$ hg rebase --rev 13 --dest 15
rebasing 13:98f6af4ee953 "C"
@@ -642,9 +642,9 @@
Test hidden changesets in the rebase set (issue4504)
$ hg up --hidden 9
- updating to a hidden changeset 4bde274eefcf
+ 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to hidden changeset 4bde274eefcf
(hidden revision '4bde274eefcf' was rewritten as: acd174b7ab39)
- 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ echo J > J
$ hg add J
$ hg commit -m J
@@ -764,9 +764,9 @@
$ hg commit --amend -m B1
$ hg commit --amend -m B2
$ hg up --hidden 'desc(B0)'
- updating to a hidden changeset a8b11f55fb19
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to hidden changeset a8b11f55fb19
(hidden revision 'a8b11f55fb19' was rewritten as: 261e70097290)
- 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ echo C > C
$ hg add C
$ hg commit -m C
@@ -790,9 +790,9 @@
Even when the chain include missing node
$ hg up --hidden 'desc(B0)'
- updating to a hidden changeset a8b11f55fb19
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to hidden changeset a8b11f55fb19
(hidden revision 'a8b11f55fb19' was rewritten as: 261e70097290)
- 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ echo D > D
$ hg add D
$ hg commit -m D
@@ -909,9 +909,9 @@
$ hg add bar
$ hg commit --amend -m "10'"
$ hg up 10 --hidden
- updating to a hidden changeset 121d9e3bc4c6
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to hidden changeset 121d9e3bc4c6
(hidden revision '121d9e3bc4c6' was rewritten as: 77d874d096a2)
- 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ echo "bar" > foo
$ hg add foo
$ hg commit -m "bar foo"
@@ -1738,7 +1738,7 @@
$ rm .hg/localtags
$ hg update -q $C --hidden
- updating to a hidden changeset 7829726be4dc
+ updated to hidden changeset 7829726be4dc
(hidden revision '7829726be4dc' is pruned)
$ hg rebase -s $B -d $D
rebasing 1:2ec65233581b "B"
--- a/tests/test-rebuildstate.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-rebuildstate.t Wed Jun 06 13:31:24 2018 -0400
@@ -4,9 +4,9 @@
> cmdtable = {}
> command = registrar.command(cmdtable)
> @command(b'debugadddrop',
- > [('', 'drop', False, 'drop file from dirstate', 'FILE'),
- > ('', 'normal-lookup', False, 'add file to dirstate', 'FILE')],
- > 'hg debugadddrop')
+ > [(b'', b'drop', False, b'drop file from dirstate', b'FILE'),
+ > (b'', b'normal-lookup', False, b'add file to dirstate', b'FILE')],
+ > b'hg debugadddrop')
> def debugadddrop(ui, repo, *pats, **opts):
> '''Add or drop unnamed arguments to or from the dirstate'''
> drop = opts.get('drop')
--- a/tests/test-rename-after-merge.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-rename-after-merge.t Wed Jun 06 13:31:24 2018 -0400
@@ -36,6 +36,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
new changesets d2ae7f538514
+ 1 changesets became public
(run 'hg heads' to see heads, 'hg merge' to merge)
$ hg merge
--- a/tests/test-rename-dir-merge.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-rename-dir-merge.t Wed Jun 06 13:31:24 2018 -0400
@@ -219,6 +219,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
new changesets 7d51ed18da25
+ 1 changesets became public
(run 'hg heads' to see heads, 'hg merge' to merge)
$ hg merge
--- a/tests/test-revert-interactive.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-revert-interactive.t Wed Jun 06 13:31:24 2018 -0400
@@ -22,16 +22,16 @@
$ mkdir -p a/folder1 a/folder2
$ cd a
$ hg init
- >>> open('f', 'wb').write("1\n2\n3\n4\n5\n")
+ >>> open('f', 'wb').write(b"1\n2\n3\n4\n5\n") and None
$ hg add f ; hg commit -m "adding f"
$ cat f > folder1/g ; hg add folder1/g ; hg commit -m "adding folder1/g"
$ cat f > folder2/h ; hg add folder2/h ; hg commit -m "adding folder2/h"
$ cat f > folder1/i ; hg add folder1/i ; hg commit -m "adding folder1/i"
- >>> open('f', 'wb').write("a\n1\n2\n3\n4\n5\nb\n")
+ >>> open('f', 'wb').write(b"a\n1\n2\n3\n4\n5\nb\n") and None
$ hg commit -m "modifying f"
- >>> open('folder1/g', 'wb').write("c\n1\n2\n3\n4\n5\nd\n")
+ >>> open('folder1/g', 'wb').write(b"c\n1\n2\n3\n4\n5\nd\n") and None
$ hg commit -m "modifying folder1/g"
- >>> open('folder2/h', 'wb').write("e\n1\n2\n3\n4\n5\nf\n")
+ >>> open('folder2/h', 'wb').write(b"e\n1\n2\n3\n4\n5\nf\n") and None
$ hg commit -m "modifying folder2/h"
$ hg tip
changeset: 6:59dd6e4ab63a
@@ -182,7 +182,7 @@
$ ls folder1/
g
- >>> open('folder1/g', 'wb').write("1\n2\n3\n4\n5\nd\n")
+ >>> open('folder1/g', 'wb').write(b"1\n2\n3\n4\n5\nd\n") and None
$ hg update -C 6
--- a/tests/test-revlog-raw.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-revlog-raw.py Wed Jun 06 13:31:24 2018 -0400
@@ -13,10 +13,10 @@
)
# TESTTMP is optional. This makes it convenient to run without run-tests.py
-tvfs = vfs.vfs(encoding.environ.get('TESTTMP', b'/tmp'))
+tvfs = vfs.vfs(encoding.environ.get(b'TESTTMP', b'/tmp'))
# Enable generaldelta otherwise revlog won't use delta as expected by the test
-tvfs.options = {'generaldelta': True, 'revlogv1': True}
+tvfs.options = {b'generaldelta': True, b'revlogv1': True}
# The test wants to control whether to use delta explicitly, based on
# "storedeltachains".
@@ -116,21 +116,21 @@
deltaparent = min(0, parentrev)
if not rlog.candelta(deltaparent, r):
deltaparent = -1
- return {'node': rlog.node(r), 'p1': pnode, 'p2': node.nullid,
- 'cs': rlog.node(rlog.linkrev(r)), 'flags': rlog.flags(r),
- 'deltabase': rlog.node(deltaparent),
- 'delta': rlog.revdiff(deltaparent, r)}
+ return {b'node': rlog.node(r), b'p1': pnode, b'p2': node.nullid,
+ b'cs': rlog.node(rlog.linkrev(r)), b'flags': rlog.flags(r),
+ b'deltabase': rlog.node(deltaparent),
+ b'delta': rlog.revdiff(deltaparent, r)}
def deltaiter(self):
chain = None
for chunkdata in iter(lambda: self.deltachunk(chain), {}):
- node = chunkdata['node']
- p1 = chunkdata['p1']
- p2 = chunkdata['p2']
- cs = chunkdata['cs']
- deltabase = chunkdata['deltabase']
- delta = chunkdata['delta']
- flags = chunkdata['flags']
+ node = chunkdata[b'node']
+ p1 = chunkdata[b'p1']
+ p2 = chunkdata[b'p2']
+ cs = chunkdata[b'cs']
+ deltabase = chunkdata[b'deltabase']
+ delta = chunkdata[b'delta']
+ flags = chunkdata[b'flags']
chain = node
@@ -166,9 +166,9 @@
flags = rlog.flags(r)
ifh = dfh = None
try:
- ifh = dlog.opener(dlog.indexfile, 'a+')
+ ifh = dlog.opener(dlog.indexfile, b'a+')
if not dlog._inline:
- dfh = dlog.opener(dlog.datafile, 'a+')
+ dfh = dlog.opener(dlog.datafile, b'a+')
dlog._addrevision(rlog.node(r), text, tr, r, p1, p2, flags,
cachedelta, ifh, dfh)
finally:
@@ -305,7 +305,7 @@
checkrevlog(rl2, expected)
print('addgroupcopy test passed')
# Copy via revlog.clone
- rl3 = newrevlog(name='_destrevlog3.i', recreate=True)
+ rl3 = newrevlog(name=b'_destrevlog3.i', recreate=True)
rl.clone(tr, rl3)
checkrevlog(rl3, expected)
print('clone test passed')
--- a/tests/test-revlog.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-revlog.t Wed Jun 06 13:31:24 2018 -0400
@@ -4,7 +4,7 @@
Flags on revlog version 0 are rejected
>>> with open('.hg/store/00changelog.i', 'wb') as fh:
- ... fh.write(b'\x00\x01\x00\x00')
+ ... fh.write(b'\x00\x01\x00\x00') and None
$ hg log
abort: unknown flags (0x01) in version 0 revlog 00changelog.i!
@@ -13,7 +13,7 @@
Unknown flags on revlog version 1 are rejected
>>> with open('.hg/store/00changelog.i', 'wb') as fh:
- ... fh.write(b'\x00\x04\x00\x01')
+ ... fh.write(b'\x00\x04\x00\x01') and None
$ hg log
abort: unknown flags (0x04) in version 1 revlog 00changelog.i!
@@ -22,7 +22,7 @@
Unknown version is rejected
>>> with open('.hg/store/00changelog.i', 'wb') as fh:
- ... fh.write(b'\x00\x00\x00\x02')
+ ... fh.write(b'\x00\x00\x00\x02') and None
$ hg log
abort: unknown version (2) in revlog 00changelog.i!
--- a/tests/test-revset.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-revset.t Wed Jun 06 13:31:24 2018 -0400
@@ -1713,8 +1713,6 @@
Test hexadecimal revision
$ log 'id(2)'
- abort: 00changelog.i@2: ambiguous identifier!
- [255]
$ log 'id(23268)'
4
$ log 'id(2785f51eece)'
@@ -1876,9 +1874,9 @@
$ hg debugrevspec '0:wdir() & fffb'
abort: 00changelog.i@fffb: ambiguous identifier!
[255]
-BROKEN should be '2' (node lookup uses unfiltered repo since dc25ed84bee8)
+BROKEN should be '2' (node lookup uses unfiltered repo)
$ hg debugrevspec '0:wdir() & id(fffb)'
- 2
+BROKEN should be '2' (node lookup uses unfiltered repo)
$ hg debugrevspec '0:wdir() & ffff8'
4
$ hg debugrevspec '0:wdir() & fffff'
--- a/tests/test-revset2.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-revset2.t Wed Jun 06 13:31:24 2018 -0400
@@ -1589,11 +1589,11 @@
>
> revsetpredicate = registrar.revsetpredicate()
>
- > @revsetpredicate('custom1()')
+ > @revsetpredicate(b'custom1()')
> def custom1(repo, subset, x):
> return revset.baseset([1])
>
- > raise error.Abort('intentional failure of loading extension')
+ > raise error.Abort(b'intentional failure of loading extension')
> EOF
$ cat <<EOF > .hg/hgrc
> [extensions]
@@ -1611,14 +1611,14 @@
> from mercurial import encoding, registrar
> cmdtable = {}
> command = registrar.command(cmdtable)
- > @command('printprevset')
+ > @command(b'printprevset')
> def printprevset(ui, repo):
> alias = {}
- > p = encoding.environ.get('P')
+ > p = encoding.environ.get(b'P')
> if p:
- > alias['P'] = p
- > revs = repo.anyrevs(['P'], user=True, localalias=alias)
- > ui.write('P=%r\n' % list(revs))
+ > alias[b'P'] = p
+ > revs = repo.anyrevs([b'P'], user=True, localalias=alias)
+ > ui.write(b'P=%r\n' % list(revs))
> EOF
$ cat >> .hg/hgrc <<EOF
--- a/tests/test-run-tests.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-run-tests.t Wed Jun 06 13:31:24 2018 -0400
@@ -574,7 +574,6 @@
# Ran 1 tests, 0 skipped, 0 failed.
$ rm test-serve-inuse.t
$ killdaemons.py $DAEMON_PIDS
- $ rm $DAEMON_PIDS
Running In Debug Mode
======================
@@ -1497,9 +1496,9 @@
$ [ $V = C ]
#endif
- ERROR: test-cases-abc.t (case B) output changed
+ ERROR: test-cases-abc.t#B output changed
!.
- Failed test-cases-abc.t (case B): output changed
+ Failed test-cases-abc.t#B: output changed
# Ran 3 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -1520,9 +1519,9 @@
$ [ $V = C ]
#endif
- ERROR: test-cases-abc.t (case B) output changed
+ ERROR: test-cases-abc.t#B output changed
!.
- Failed test-cases-abc.t (case B): output changed
+ Failed test-cases-abc.t#B: output changed
# Ran 2 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -1545,9 +1544,9 @@
$ [ $V = C ]
#endif
- ERROR: test-cases-abc.t (case B) output changed
+ ERROR: test-cases-abc.t#B output changed
!.
- Failed test-cases-abc.t (case B): output changed
+ Failed test-cases-abc.t#B: output changed
# Ran 2 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -1572,7 +1571,77 @@
..
# Ran 2 tests, 0 skipped, 0 failed.
+Support running a specific test case
+
+ $ rt "test-cases-abc.t#B"
+
+ --- $TESTTMP/anothertests/cases/test-cases-abc.t
+ +++ $TESTTMP/anothertests/cases/test-cases-abc.t.B.err
+ @@ -7,7 +7,7 @@
+ $ V=C
+ #endif
+ $ echo $V | sed 's/A/C/'
+ - C
+ + B
+ #if C
+ $ [ $V = C ]
+ #endif
+
+ ERROR: test-cases-abc.t#B output changed
+ !
+ Failed test-cases-abc.t#B: output changed
+ # Ran 1 tests, 0 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
+
+Support running multiple test cases in the same file
+
+ $ rt test-cases-abc.t#B test-cases-abc.t#C
+
+ --- $TESTTMP/anothertests/cases/test-cases-abc.t
+ +++ $TESTTMP/anothertests/cases/test-cases-abc.t.B.err
+ @@ -7,7 +7,7 @@
+ $ V=C
+ #endif
+ $ echo $V | sed 's/A/C/'
+ - C
+ + B
+ #if C
+ $ [ $V = C ]
+ #endif
+
+ ERROR: test-cases-abc.t#B output changed
+ !.
+ Failed test-cases-abc.t#B: output changed
+ # Ran 2 tests, 0 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
+
+Support running invalid test cases
+
+ $ rt test-cases-abc.t#B test-cases-abc.t#D
+
+ --- $TESTTMP/anothertests/cases/test-cases-abc.t
+ +++ $TESTTMP/anothertests/cases/test-cases-abc.t.B.err
+ @@ -7,7 +7,7 @@
+ $ V=C
+ #endif
+ $ echo $V | sed 's/A/C/'
+ - C
+ + B
+ #if C
+ $ [ $V = C ]
+ #endif
+
+ ERROR: test-cases-abc.t#B output changed
+ !
+ Failed test-cases-abc.t#B: output changed
+ # Ran 1 tests, 0 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
+
Test automatic pattern replacement
+==================================
$ cat << EOF >> common-pattern.py
> substitutions = [
--- a/tests/test-serve.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-serve.t Wed Jun 06 13:31:24 2018 -0400
@@ -6,10 +6,11 @@
> | sed -e "s/:$HGPORT1\\([^0-9]\\)/:HGPORT1\1/g" \
> -e "s/:$HGPORT2\\([^0-9]\\)/:HGPORT2\1/g" \
> -e 's/http:\/\/[^/]*\//http:\/\/localhost\//'
- > cat hg.pid >> "$DAEMON_PIDS"
+ > if [ -f hg.pid ]; then
+ > killdaemons.py hg.pid
+ > fi
> echo % errors
> cat errors.log
- > killdaemons.py hg.pid
> }
$ hg init test
--- a/tests/test-share.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-share.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,3 @@
-#require killdaemons
-
$ echo "[extensions]" >> $HGRCPATH
$ echo "share = " >> $HGRCPATH
@@ -362,6 +360,7 @@
searching for changes
no changes found
adding remote bookmark bm3
+ 1 changesets became public
$ hg boo
bm1 3:b87954705719
* bm3 4:62f4ded848e4
--- a/tests/test-simple-update.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-simple-update.t Wed Jun 06 13:31:24 2018 -0400
@@ -31,6 +31,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files
new changesets 30aff43faee1
+ 1 changesets became public
(run 'hg update' to get a working copy)
$ hg verify
--- a/tests/test-simplekeyvaluefile.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-simplekeyvaluefile.py Wed Jun 06 13:31:24 2018 -0400
@@ -44,11 +44,11 @@
self.vfs = mockvfs()
def testbasicwritingiandreading(self):
- dw = {'key1': 'value1', 'Key2': 'value2'}
- scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(dw)
- self.assertEqual(sorted(self.vfs.read('kvfile').split('\n')),
- ['', 'Key2=value2', 'key1=value1'])
- dr = scmutil.simplekeyvaluefile(self.vfs, 'kvfile').read()
+ dw = {b'key1': b'value1', b'Key2': b'value2'}
+ scmutil.simplekeyvaluefile(self.vfs, b'kvfile').write(dw)
+ self.assertEqual(sorted(self.vfs.read(b'kvfile').split(b'\n')),
+ [b'', b'Key2=value2', b'key1=value1'])
+ dr = scmutil.simplekeyvaluefile(self.vfs, b'kvfile').read()
self.assertEqual(dr, dw)
if not getattr(unittest.TestCase, 'assertRaisesRegex', False):
@@ -58,33 +58,33 @@
unittest.TestCase.assertRaisesRegexp)
def testinvalidkeys(self):
- d = {'0key1': 'value1', 'Key2': 'value2'}
+ d = {b'0key1': b'value1', b'Key2': b'value2'}
with self.assertRaisesRegex(error.ProgrammingError,
'keys must start with a letter.*'):
- scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(d)
+ scmutil.simplekeyvaluefile(self.vfs, b'kvfile').write(d)
- d = {'key1@': 'value1', 'Key2': 'value2'}
+ d = {b'key1@': b'value1', b'Key2': b'value2'}
with self.assertRaisesRegex(error.ProgrammingError, 'invalid key.*'):
- scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(d)
+ scmutil.simplekeyvaluefile(self.vfs, b'kvfile').write(d)
def testinvalidvalues(self):
- d = {'key1': 'value1', 'Key2': 'value2\n'}
+ d = {b'key1': b'value1', b'Key2': b'value2\n'}
with self.assertRaisesRegex(error.ProgrammingError, 'invalid val.*'):
- scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(d)
+ scmutil.simplekeyvaluefile(self.vfs, b'kvfile').write(d)
def testcorruptedfile(self):
- self.vfs.contents['badfile'] = 'ababagalamaga\n'
+ self.vfs.contents[b'badfile'] = b'ababagalamaga\n'
with self.assertRaisesRegex(error.CorruptedState,
'dictionary.*element.*'):
- scmutil.simplekeyvaluefile(self.vfs, 'badfile').read()
+ scmutil.simplekeyvaluefile(self.vfs, b'badfile').read()
def testfirstline(self):
- dw = {'key1': 'value1'}
- scmutil.simplekeyvaluefile(self.vfs, 'fl').write(dw, firstline='1.0')
- self.assertEqual(self.vfs.read('fl'), '1.0\nkey1=value1\n')
- dr = scmutil.simplekeyvaluefile(self.vfs, 'fl')\
+ dw = {b'key1': b'value1'}
+ scmutil.simplekeyvaluefile(self.vfs, b'fl').write(dw, firstline=b'1.0')
+ self.assertEqual(self.vfs.read(b'fl'), b'1.0\nkey1=value1\n')
+ dr = scmutil.simplekeyvaluefile(self.vfs, b'fl')\
.read(firstlinenonkeyval=True)
- self.assertEqual(dr, {'__firstline': '1.0', 'key1': 'value1'})
+ self.assertEqual(dr, {b'__firstline': b'1.0', b'key1': b'value1'})
if __name__ == "__main__":
silenttestrunner.main(__name__)
--- a/tests/test-simplemerge.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-simplemerge.py Wed Jun 06 13:31:24 2018 -0400
@@ -35,12 +35,12 @@
incorporating the changes from both BASE->OTHER and BASE->THIS.
All three will typically be sequences of lines."""
def __init__(self, base, a, b):
- basetext = '\n'.join([i.strip('\n') for i in base] + [''])
- atext = '\n'.join([i.strip('\n') for i in a] + [''])
- btext = '\n'.join([i.strip('\n') for i in b] + [''])
+ basetext = b'\n'.join([i.strip(b'\n') for i in base] + [b''])
+ atext = b'\n'.join([i.strip(b'\n') for i in a] + [b''])
+ btext = b'\n'.join([i.strip(b'\n') for i in b] + [b''])
if (stringutil.binary(basetext) or stringutil.binary(atext)
or stringutil.binary(btext)):
- raise error.Abort("don't know how to merge binary files")
+ raise error.Abort(b"don't know how to merge binary files")
simplemerge.Merge3Text.__init__(self, basetext, atext, btext,
base, a, b)
@@ -52,7 +52,7 @@
############################################################
# test case data from the gnu diffutils manual
# common base
-TZU = split_lines(""" The Nameless is the origin of Heaven and Earth;
+TZU = split_lines(b""" The Nameless is the origin of Heaven and Earth;
The named is the mother of all things.
Therefore let there always be non-being,
@@ -67,7 +67,7 @@
The door of all subtleties!
""")
-LAO = split_lines(""" The Way that can be told of is not the eternal Way;
+LAO = split_lines(b""" The Way that can be told of is not the eternal Way;
The name that can be named is not the eternal name.
The Nameless is the origin of Heaven and Earth;
The Named is the mother of all things.
@@ -81,7 +81,7 @@
""")
-TAO = split_lines(""" The Way that can be told of is not the eternal Way;
+TAO = split_lines(b""" The Way that can be told of is not the eternal Way;
The name that can be named is not the eternal name.
The Nameless is the origin of Heaven and Earth;
The named is the mother of all things.
@@ -98,7 +98,7 @@
""")
-MERGED_RESULT = split_lines("""\
+MERGED_RESULT = split_lines(b"""\
The Way that can be told of is not the eternal Way;
The name that can be named is not the eternal name.
The Nameless is the origin of Heaven and Earth;
@@ -124,167 +124,167 @@
def test_no_changes(self):
"""No conflicts because nothing changed"""
- m3 = Merge3(['aaa', 'bbb'],
- ['aaa', 'bbb'],
- ['aaa', 'bbb'])
+ m3 = Merge3([b'aaa', b'bbb'],
+ [b'aaa', b'bbb'],
+ [b'aaa', b'bbb'])
- self.assertEquals(m3.find_unconflicted(),
- [(0, 2)])
+ self.assertEqual(m3.find_unconflicted(),
+ [(0, 2)])
- self.assertEquals(list(m3.find_sync_regions()),
- [(0, 2,
- 0, 2,
- 0, 2),
- (2, 2, 2, 2, 2, 2)])
+ self.assertEqual(list(m3.find_sync_regions()),
+ [(0, 2,
+ 0, 2,
+ 0, 2),
+ (2, 2, 2, 2, 2, 2)])
- self.assertEquals(list(m3.merge_regions()),
- [('unchanged', 0, 2)])
+ self.assertEqual(list(m3.merge_regions()),
+ [(b'unchanged', 0, 2)])
- self.assertEquals(list(m3.merge_groups()),
- [('unchanged', ['aaa', 'bbb'])])
+ self.assertEqual(list(m3.merge_groups()),
+ [(b'unchanged', [b'aaa', b'bbb'])])
def test_front_insert(self):
- m3 = Merge3(['zz'],
- ['aaa', 'bbb', 'zz'],
- ['zz'])
+ m3 = Merge3([b'zz'],
+ [b'aaa', b'bbb', b'zz'],
+ [b'zz'])
# todo: should use a sentinel at end as from get_matching_blocks
# to match without zz
- self.assertEquals(list(m3.find_sync_regions()),
- [(0, 1, 2, 3, 0, 1),
- (1, 1, 3, 3, 1, 1)])
+ self.assertEqual(list(m3.find_sync_regions()),
+ [(0, 1, 2, 3, 0, 1),
+ (1, 1, 3, 3, 1, 1)])
- self.assertEquals(list(m3.merge_regions()),
- [('a', 0, 2),
- ('unchanged', 0, 1)])
+ self.assertEqual(list(m3.merge_regions()),
+ [(b'a', 0, 2),
+ (b'unchanged', 0, 1)])
- self.assertEquals(list(m3.merge_groups()),
- [('a', ['aaa', 'bbb']),
- ('unchanged', ['zz'])])
+ self.assertEqual(list(m3.merge_groups()),
+ [(b'a', [b'aaa', b'bbb']),
+ (b'unchanged', [b'zz'])])
def test_null_insert(self):
m3 = Merge3([],
- ['aaa', 'bbb'],
+ [b'aaa', b'bbb'],
[])
# todo: should use a sentinel at end as from get_matching_blocks
# to match without zz
- self.assertEquals(list(m3.find_sync_regions()),
- [(0, 0, 2, 2, 0, 0)])
+ self.assertEqual(list(m3.find_sync_regions()),
+ [(0, 0, 2, 2, 0, 0)])
- self.assertEquals(list(m3.merge_regions()),
- [('a', 0, 2)])
+ self.assertEqual(list(m3.merge_regions()),
+ [(b'a', 0, 2)])
- self.assertEquals(list(m3.merge_lines()),
- ['aaa', 'bbb'])
+ self.assertEqual(list(m3.merge_lines()),
+ [b'aaa', b'bbb'])
def test_no_conflicts(self):
"""No conflicts because only one side changed"""
- m3 = Merge3(['aaa', 'bbb'],
- ['aaa', '111', 'bbb'],
- ['aaa', 'bbb'])
+ m3 = Merge3([b'aaa', b'bbb'],
+ [b'aaa', b'111', b'bbb'],
+ [b'aaa', b'bbb'])
- self.assertEquals(m3.find_unconflicted(),
- [(0, 1), (1, 2)])
+ self.assertEqual(m3.find_unconflicted(),
+ [(0, 1), (1, 2)])
- self.assertEquals(list(m3.find_sync_regions()),
- [(0, 1, 0, 1, 0, 1),
- (1, 2, 2, 3, 1, 2),
- (2, 2, 3, 3, 2, 2)])
+ self.assertEqual(list(m3.find_sync_regions()),
+ [(0, 1, 0, 1, 0, 1),
+ (1, 2, 2, 3, 1, 2),
+ (2, 2, 3, 3, 2, 2)])
- self.assertEquals(list(m3.merge_regions()),
- [('unchanged', 0, 1),
- ('a', 1, 2),
- ('unchanged', 1, 2)])
+ self.assertEqual(list(m3.merge_regions()),
+ [(b'unchanged', 0, 1),
+ (b'a', 1, 2),
+ (b'unchanged', 1, 2)])
def test_append_a(self):
- m3 = Merge3(['aaa\n', 'bbb\n'],
- ['aaa\n', 'bbb\n', '222\n'],
- ['aaa\n', 'bbb\n'])
+ m3 = Merge3([b'aaa\n', b'bbb\n'],
+ [b'aaa\n', b'bbb\n', b'222\n'],
+ [b'aaa\n', b'bbb\n'])
- self.assertEquals(''.join(m3.merge_lines()),
- 'aaa\nbbb\n222\n')
+ self.assertEqual(b''.join(m3.merge_lines()),
+ b'aaa\nbbb\n222\n')
def test_append_b(self):
- m3 = Merge3(['aaa\n', 'bbb\n'],
- ['aaa\n', 'bbb\n'],
- ['aaa\n', 'bbb\n', '222\n'])
+ m3 = Merge3([b'aaa\n', b'bbb\n'],
+ [b'aaa\n', b'bbb\n'],
+ [b'aaa\n', b'bbb\n', b'222\n'])
- self.assertEquals(''.join(m3.merge_lines()),
- 'aaa\nbbb\n222\n')
+ self.assertEqual(b''.join(m3.merge_lines()),
+ b'aaa\nbbb\n222\n')
def test_append_agreement(self):
- m3 = Merge3(['aaa\n', 'bbb\n'],
- ['aaa\n', 'bbb\n', '222\n'],
- ['aaa\n', 'bbb\n', '222\n'])
+ m3 = Merge3([b'aaa\n', b'bbb\n'],
+ [b'aaa\n', b'bbb\n', b'222\n'],
+ [b'aaa\n', b'bbb\n', b'222\n'])
- self.assertEquals(''.join(m3.merge_lines()),
- 'aaa\nbbb\n222\n')
+ self.assertEqual(b''.join(m3.merge_lines()),
+ b'aaa\nbbb\n222\n')
def test_append_clash(self):
- m3 = Merge3(['aaa\n', 'bbb\n'],
- ['aaa\n', 'bbb\n', '222\n'],
- ['aaa\n', 'bbb\n', '333\n'])
+ m3 = Merge3([b'aaa\n', b'bbb\n'],
+ [b'aaa\n', b'bbb\n', b'222\n'],
+ [b'aaa\n', b'bbb\n', b'333\n'])
- ml = m3.merge_lines(name_a='a',
- name_b='b',
- start_marker='<<',
- mid_marker='--',
- end_marker='>>')
- self.assertEquals(''.join(ml),
- 'aaa\n'
- 'bbb\n'
- '<< a\n'
- '222\n'
- '--\n'
- '333\n'
- '>> b\n'
+ ml = m3.merge_lines(name_a=b'a',
+ name_b=b'b',
+ start_marker=b'<<',
+ mid_marker=b'--',
+ end_marker=b'>>')
+ self.assertEqual(b''.join(ml),
+ b'aaa\n'
+ b'bbb\n'
+ b'<< a\n'
+ b'222\n'
+ b'--\n'
+ b'333\n'
+ b'>> b\n'
)
def test_insert_agreement(self):
- m3 = Merge3(['aaa\n', 'bbb\n'],
- ['aaa\n', '222\n', 'bbb\n'],
- ['aaa\n', '222\n', 'bbb\n'])
+ m3 = Merge3([b'aaa\n', b'bbb\n'],
+ [b'aaa\n', b'222\n', b'bbb\n'],
+ [b'aaa\n', b'222\n', b'bbb\n'])
- ml = m3.merge_lines(name_a='a',
- name_b='b',
- start_marker='<<',
- mid_marker='--',
- end_marker='>>')
- self.assertEquals(''.join(ml), 'aaa\n222\nbbb\n')
+ ml = m3.merge_lines(name_a=b'a',
+ name_b=b'b',
+ start_marker=b'<<',
+ mid_marker=b'--',
+ end_marker=b'>>')
+ self.assertEqual(b''.join(ml), b'aaa\n222\nbbb\n')
def test_insert_clash(self):
"""Both try to insert lines in the same place."""
- m3 = Merge3(['aaa\n', 'bbb\n'],
- ['aaa\n', '111\n', 'bbb\n'],
- ['aaa\n', '222\n', 'bbb\n'])
+ m3 = Merge3([b'aaa\n', b'bbb\n'],
+ [b'aaa\n', b'111\n', b'bbb\n'],
+ [b'aaa\n', b'222\n', b'bbb\n'])
- self.assertEquals(m3.find_unconflicted(),
- [(0, 1), (1, 2)])
+ self.assertEqual(m3.find_unconflicted(),
+ [(0, 1), (1, 2)])
- self.assertEquals(list(m3.find_sync_regions()),
- [(0, 1, 0, 1, 0, 1),
- (1, 2, 2, 3, 2, 3),
- (2, 2, 3, 3, 3, 3)])
+ self.assertEqual(list(m3.find_sync_regions()),
+ [(0, 1, 0, 1, 0, 1),
+ (1, 2, 2, 3, 2, 3),
+ (2, 2, 3, 3, 3, 3)])
- self.assertEquals(list(m3.merge_regions()),
- [('unchanged', 0, 1),
- ('conflict', 1, 1, 1, 2, 1, 2),
- ('unchanged', 1, 2)])
+ self.assertEqual(list(m3.merge_regions()),
+ [(b'unchanged', 0, 1),
+ (b'conflict', 1, 1, 1, 2, 1, 2),
+ (b'unchanged', 1, 2)])
- self.assertEquals(list(m3.merge_groups()),
- [('unchanged', ['aaa\n']),
- ('conflict', [], ['111\n'], ['222\n']),
- ('unchanged', ['bbb\n']),
- ])
+ self.assertEqual(list(m3.merge_groups()),
+ [(b'unchanged', [b'aaa\n']),
+ (b'conflict', [], [b'111\n'], [b'222\n']),
+ (b'unchanged', [b'bbb\n']),
+ ])
- ml = m3.merge_lines(name_a='a',
- name_b='b',
- start_marker='<<',
- mid_marker='--',
- end_marker='>>')
- self.assertEquals(''.join(ml),
-'''aaa
+ ml = m3.merge_lines(name_a=b'a',
+ name_b=b'b',
+ start_marker=b'<<',
+ mid_marker=b'--',
+ end_marker=b'>>')
+ self.assertEqual(b''.join(ml),
+b'''aaa
<< a
111
--
@@ -295,64 +295,64 @@
def test_replace_clash(self):
"""Both try to insert lines in the same place."""
- m3 = Merge3(['aaa', '000', 'bbb'],
- ['aaa', '111', 'bbb'],
- ['aaa', '222', 'bbb'])
+ m3 = Merge3([b'aaa', b'000', b'bbb'],
+ [b'aaa', b'111', b'bbb'],
+ [b'aaa', b'222', b'bbb'])
- self.assertEquals(m3.find_unconflicted(),
- [(0, 1), (2, 3)])
+ self.assertEqual(m3.find_unconflicted(),
+ [(0, 1), (2, 3)])
- self.assertEquals(list(m3.find_sync_regions()),
- [(0, 1, 0, 1, 0, 1),
+ self.assertEqual(list(m3.find_sync_regions()),
+ [(0, 1, 0, 1, 0, 1),
(2, 3, 2, 3, 2, 3),
(3, 3, 3, 3, 3, 3)])
def test_replace_multi(self):
"""Replacement with regions of different size."""
- m3 = Merge3(['aaa', '000', '000', 'bbb'],
- ['aaa', '111', '111', '111', 'bbb'],
- ['aaa', '222', '222', '222', '222', 'bbb'])
+ m3 = Merge3([b'aaa', b'000', b'000', b'bbb'],
+ [b'aaa', b'111', b'111', b'111', b'bbb'],
+ [b'aaa', b'222', b'222', b'222', b'222', b'bbb'])
- self.assertEquals(m3.find_unconflicted(),
- [(0, 1), (3, 4)])
+ self.assertEqual(m3.find_unconflicted(),
+ [(0, 1), (3, 4)])
- self.assertEquals(list(m3.find_sync_regions()),
- [(0, 1, 0, 1, 0, 1),
- (3, 4, 4, 5, 5, 6),
- (4, 4, 5, 5, 6, 6)])
+ self.assertEqual(list(m3.find_sync_regions()),
+ [(0, 1, 0, 1, 0, 1),
+ (3, 4, 4, 5, 5, 6),
+ (4, 4, 5, 5, 6, 6)])
def test_merge_poem(self):
"""Test case from diff3 manual"""
m3 = Merge3(TZU, LAO, TAO)
- ml = list(m3.merge_lines('LAO', 'TAO'))
- self.log('merge result:')
- self.log(''.join(ml))
- self.assertEquals(ml, MERGED_RESULT)
+ ml = list(m3.merge_lines(b'LAO', b'TAO'))
+ self.log(b'merge result:')
+ self.log(b''.join(ml))
+ self.assertEqual(ml, MERGED_RESULT)
def test_binary(self):
with self.assertRaises(error.Abort):
- Merge3(['\x00'], ['a'], ['b'])
+ Merge3([b'\x00'], [b'a'], [b'b'])
def test_dos_text(self):
- base_text = 'a\r\n'
- this_text = 'b\r\n'
- other_text = 'c\r\n'
+ base_text = b'a\r\n'
+ this_text = b'b\r\n'
+ other_text = b'c\r\n'
m3 = Merge3(base_text.splitlines(True), other_text.splitlines(True),
this_text.splitlines(True))
- m_lines = m3.merge_lines('OTHER', 'THIS')
- self.assertEqual('<<<<<<< OTHER\r\nc\r\n=======\r\nb\r\n'
- '>>>>>>> THIS\r\n'.splitlines(True), list(m_lines))
+ m_lines = m3.merge_lines(b'OTHER', b'THIS')
+ self.assertEqual(b'<<<<<<< OTHER\r\nc\r\n=======\r\nb\r\n'
+ b'>>>>>>> THIS\r\n'.splitlines(True), list(m_lines))
def test_mac_text(self):
- base_text = 'a\r'
- this_text = 'b\r'
- other_text = 'c\r'
+ base_text = b'a\r'
+ this_text = b'b\r'
+ other_text = b'c\r'
m3 = Merge3(base_text.splitlines(True), other_text.splitlines(True),
this_text.splitlines(True))
- m_lines = m3.merge_lines('OTHER', 'THIS')
- self.assertEqual('<<<<<<< OTHER\rc\r=======\rb\r'
- '>>>>>>> THIS\r'.splitlines(True), list(m_lines))
+ m_lines = m3.merge_lines(b'OTHER', b'THIS')
+ self.assertEqual(b'<<<<<<< OTHER\rc\r=======\rb\r'
+ b'>>>>>>> THIS\r'.splitlines(True), list(m_lines))
if __name__ == '__main__':
# hide the timer
--- a/tests/test-ssh-bundle1.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-ssh-bundle1.t Wed Jun 06 13:31:24 2018 -0400
@@ -444,11 +444,11 @@
>
> def wrappedpush(orig, repo, *args, **kwargs):
> res = orig(repo, *args, **kwargs)
- > repo.ui.write('local stdout\n')
+ > repo.ui.write(b'local stdout\n')
> return res
>
> def extsetup(ui):
- > extensions.wrapfunction(exchange, 'push', wrappedpush)
+ > extensions.wrapfunction(exchange, b'push', wrappedpush)
> EOF
$ cat >> .hg/hgrc << EOF
@@ -537,7 +537,7 @@
$ cat > $TESTTMP/failhook << EOF
> def hook(ui, repo, **kwargs):
- > ui.write('hook failure!\n')
+ > ui.write(b'hook failure!\n')
> ui.flush()
> return 1
> EOF
--- a/tests/test-ssh-proto-unbundle.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-ssh-proto-unbundle.t Wed Jun 06 13:31:24 2018 -0400
@@ -161,10 +161,12 @@
> import sys
> def hook1line(ui, repo, **kwargs):
> ui.write(b'ui.write 1 line\n')
+ > ui.flush()
> return 1
> def hook2lines(ui, repo, **kwargs):
> ui.write(b'ui.write 2 lines 1\n')
> ui.write(b'ui.write 2 lines 2\n')
+ > ui.flush()
> return 1
> def hook1lineflush(ui, repo, **kwargs):
> ui.write(b'ui.write 1 line flush\n')
@@ -181,21 +183,31 @@
> ui.write_err(b'ui.write_err 1\n')
> ui.write(b'ui.write 2\n')
> ui.write_err(b'ui.write_err 2\n')
+ > ui.flush()
> return 1
> def hookprintstdout(ui, repo, **kwargs):
> print('printed line')
+ > sys.stdout.flush()
> return 1
> def hookprintandwrite(ui, repo, **kwargs):
> print('print 1')
+ > sys.stdout.flush()
> ui.write(b'ui.write 1\n')
+ > ui.flush()
> print('print 2')
+ > sys.stdout.flush()
> ui.write(b'ui.write 2\n')
+ > ui.flush()
> return 1
> def hookprintstderrandstdout(ui, repo, **kwargs):
> print('stdout 1')
+ > sys.stdout.flush()
> print('stderr 1', file=sys.stderr)
+ > sys.stderr.flush()
> print('stdout 2')
+ > sys.stdout.flush()
> print('stderr 2', file=sys.stderr)
+ > sys.stderr.flush()
> return 1
> EOF
@@ -1046,10 +1058,10 @@
e> adding manifests\n
e> adding file changes\n
e> added 1 changesets with 1 changes to 1 files\n
+ e> print 1\n
e> ui.write 1\n
+ e> print 2\n
e> ui.write 2\n
- e> print 1\n
- e> print 2\n
e> transaction abort!\n
e> rollback completed\n
e> abort: pretxnchangegroup.fail hook failed\n
@@ -1106,10 +1118,10 @@
e> adding manifests\n
e> adding file changes\n
e> added 1 changesets with 1 changes to 1 files\n
+ e> print 1\n
e> ui.write 1\n
+ e> print 2\n
e> ui.write 2\n
- e> print 1\n
- e> print 2\n
e> transaction abort!\n
e> rollback completed\n
e> abort: pretxnchangegroup.fail hook failed\n
@@ -1180,10 +1192,10 @@
e> adding manifests\n
e> adding file changes\n
e> added 1 changesets with 1 changes to 1 files\n
+ e> stdout 1\n
e> stderr 1\n
+ e> stdout 2\n
e> stderr 2\n
- e> stdout 1\n
- e> stdout 2\n
e> transaction abort!\n
e> rollback completed\n
e> abort: pretxnchangegroup.fail hook failed\n
@@ -1240,10 +1252,10 @@
e> adding manifests\n
e> adding file changes\n
e> added 1 changesets with 1 changes to 1 files\n
+ e> stdout 1\n
e> stderr 1\n
+ e> stdout 2\n
e> stderr 2\n
- e> stdout 1\n
- e> stdout 2\n
e> transaction abort!\n
e> rollback completed\n
e> abort: pretxnchangegroup.fail hook failed\n
@@ -1731,10 +1743,10 @@
e> shell stderr 1\n
e> shell stdout 2\n
e> shell stderr 2\n
+ e> stdout 1\n
e> stderr 1\n
+ e> stdout 2\n
e> stderr 2\n
- e> stdout 1\n
- e> stdout 2\n
e> transaction abort!\n
e> rollback completed\n
e> abort: pretxnchangegroup.b hook failed\n
@@ -1795,10 +1807,10 @@
e> shell stderr 1\n
e> shell stdout 2\n
e> shell stderr 2\n
+ e> stdout 1\n
e> stderr 1\n
+ e> stdout 2\n
e> stderr 2\n
- e> stdout 1\n
- e> stdout 2\n
e> transaction abort!\n
e> rollback completed\n
e> abort: pretxnchangegroup.b hook failed\n
--- a/tests/test-ssh-proto.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-ssh-proto.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,3 +1,5 @@
+#require no-chg
+
$ cat > hgrc-sshv2 << EOF
> %include $HGRCPATH
> [experimental]
--- a/tests/test-ssh.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-ssh.t Wed Jun 06 13:31:24 2018 -0400
@@ -230,7 +230,7 @@
namespaces
phases
$ hg book foo -r 0
- $ hg out -B
+ $ hg out -B --config paths.default=bogus://invalid --config paths.default:pushurl=`hg paths default`
comparing with ssh://user@dummy/remote
searching for changed bookmarks
foo 1160648e36ce
@@ -272,12 +272,14 @@
$ cat <<EOF > $TESTTMP/badhook
> import sys
> sys.stdout.write("KABOOM\n")
+ > sys.stdout.flush()
> EOF
$ cat <<EOF > $TESTTMP/badpyhook.py
> import sys
> def hook(ui, repo, hooktype, **kwargs):
> sys.stdout.write("KABOOM IN PROCESS\n")
+ > sys.stdout.flush()
> EOF
$ cat <<EOF >> ../remote/.hg/hgrc
@@ -455,11 +457,12 @@
>
> def wrappedpush(orig, repo, *args, **kwargs):
> res = orig(repo, *args, **kwargs)
- > repo.ui.write('local stdout\n')
+ > repo.ui.write(b'local stdout\n')
+ > repo.ui.flush()
> return res
>
> def extsetup(ui):
- > extensions.wrapfunction(exchange, 'push', wrappedpush)
+ > extensions.wrapfunction(exchange, b'push', wrappedpush)
> EOF
$ cat >> .hg/hgrc << EOF
@@ -569,7 +572,7 @@
$ cat > $TESTTMP/failhook << EOF
> def hook(ui, repo, **kwargs):
- > ui.write('hook failure!\n')
+ > ui.write(b'hook failure!\n')
> ui.flush()
> return 1
> EOF
--- a/tests/test-static-http.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-static-http.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,4 +1,4 @@
-#require killdaemons no-reposimplestore
+#require no-reposimplestore
$ hg clone http://localhost:$HGPORT/ copy
abort: * (glob)
--- a/tests/test-status-inprocess.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-status-inprocess.py Wed Jun 06 13:31:24 2018 -0400
@@ -1,12 +1,24 @@
#!/usr/bin/env python
from __future__ import absolute_import, print_function
+import sys
+
from mercurial import (
commands,
localrepo,
ui as uimod,
)
+print_ = print
+def print(*args, **kwargs):
+ """print() wrapper that flushes stdout buffers to avoid py3 buffer issues
+
+ We could also just write directly to sys.stdout.buffer the way the
+ ui object will, but this was easier for porting the test.
+ """
+ print_(*args, **kwargs)
+ sys.stdout.flush()
+
u = uimod.ui.load()
print('% creating repo')
--- a/tests/test-status-terse.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-status-terse.t Wed Jun 06 13:31:24 2018 -0400
@@ -183,3 +183,55 @@
$ hg status --terse marduic --rev 0 --rev 1
abort: cannot use --terse with --rev
[255]
+
+Config item to set the default terseness
+ $ cat <<EOF >> $HGRCPATH
+ > [commands]
+ > status.terse = u
+ > EOF
+ $ hg status -mu
+ M x/aa
+ M x/bb
+ ? a
+ ? b
+ ? x/l/
+ ? x/m/
+ ? x/n/
+ ? y/
+
+Command line flag overrides the default
+ $ hg status --terse=
+ M x/aa
+ M x/bb
+ ? a
+ ? b
+ ? x/l/aa
+ ? x/l/u/a/bb
+ ? x/l/u/bb
+ ? x/m/aa
+ ? x/n/aa
+ ? y/l
+ ? y/m
+ $ hg status --terse=mardu
+ M x/aa
+ M x/bb
+ ? a
+ ? b
+ ? x/l/
+ ? x/m/
+ ? x/n/
+ ? y/
+
+Specifying --rev should still work, with the terseness disabled.
+ $ hg status --rev 0
+ M x/aa
+ M x/bb
+ ? a
+ ? b
+ ? x/l/aa
+ ? x/l/u/a/bb
+ ? x/l/u/bb
+ ? x/m/aa
+ ? x/n/aa
+ ? y/l
+ ? y/m
--- a/tests/test-status.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-status.t Wed Jun 06 13:31:24 2018 -0400
@@ -109,11 +109,8 @@
tweaking defaults works
$ hg status --cwd a --config ui.tweakdefaults=yes
- ? 1/in_a_1
- ? in_a
- ? ../b/1/in_b_1
- ? ../b/2/in_b_2
- ? ../b/in_b
+ ? .
+ ? ../b/
? ../in_root
$ HGPLAIN=1 hg status --cwd a --config ui.tweakdefaults=yes
? a/1/in_a_1 (glob)
@@ -123,11 +120,8 @@
? b/in_b (glob)
? in_root
$ HGPLAINEXCEPT=tweakdefaults hg status --cwd a --config ui.tweakdefaults=yes
- ? 1/in_a_1 (glob)
- ? in_a
- ? ../b/1/in_b_1 (glob)
- ? ../b/2/in_b_2 (glob)
- ? ../b/in_b (glob)
+ ? .
+ ? ../b/
? ../in_root (glob)
relative paths can be requested
@@ -157,11 +151,8 @@
> status.relative = False
> EOF
$ hg status --cwd a --config ui.tweakdefaults=yes
- ? a/1/in_a_1
- ? a/in_a
- ? b/1/in_b_1
- ? b/2/in_b_2
- ? b/in_b
+ ? a/
+ ? b/
? in_root
$ cd ..
@@ -465,12 +456,12 @@
$ hg init repo5
$ cd repo5
- >>> open("010a", r"wb").write(b"\1\nfoo")
+ >>> open("010a", r"wb").write(b"\1\nfoo") and None
$ hg ci -q -A -m 'initial checkin'
$ hg status -A
C 010a
- >>> open("010a", r"wb").write(b"\1\nbar")
+ >>> open("010a", r"wb").write(b"\1\nbar") and None
$ hg status -A
M 010a
$ hg ci -q -m 'modify 010a'
--- a/tests/test-subrepo-paths.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-subrepo-paths.t Wed Jun 06 13:31:24 2018 -0400
@@ -55,7 +55,7 @@
> .* = \1
> EOF
$ hg debugsub
- abort: bad subrepository pattern in $TESTTMP/outer/.hg/hgrc:2: invalid group reference
+ abort: bad subrepository pattern in $TESTTMP/outer/.hg/hgrc:2: invalid group reference* (glob)
[255]
$ cd ..
--- a/tests/test-subrepo-relative-path.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-subrepo-relative-path.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,3 @@
-#require killdaemons
-
Preparing the subrepository 'sub'
$ hg init sub
@@ -58,6 +56,30 @@
new changesets 863c1745b441
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+Ensure that subrepos pay attention to default:pushurl
+
+ $ cat > cloned/.hg/hgrc << EOF
+ > [paths]
+ > default:pushurl = http://localhost:$HGPORT/main
+ > EOF
+
+ $ hg -R cloned out -S --config paths.default=bogus://invalid
+ comparing with http://localhost:$HGPORT/main
+ searching for changes
+ no changes found
+ comparing with http://localhost:$HGPORT/sub
+ searching for changes
+ no changes found
+ [1]
+
+ $ hg -R cloned push --config paths.default=bogus://invalid
+ pushing to http://localhost:$HGPORT/main
+ no changes made to subrepo sub since last push to http://localhost:$HGPORT/sub
+ searching for changes
+ no changes found
+ abort: HTTP Error 403: ssl required
+ [255]
+
Checking cloned repo ids
$ hg id -R cloned
--- a/tests/test-symlink-os-yes-fs-no.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-symlink-os-yes-fs-no.py Wed Jun 06 13:31:24 2018 -0400
@@ -6,6 +6,7 @@
from mercurial import (
commands,
hg,
+ pycompat,
ui as uimod,
util,
)
@@ -19,13 +20,13 @@
u = uimod.ui.load()
# hide outer repo
-hg.peer(u, {}, '.', create=True)
+hg.peer(u, {}, b'.', create=True)
# unbundle with symlink support
-hg.peer(u, {}, 'test0', create=True)
+hg.peer(u, {}, b'test0', create=True)
-repo = hg.repository(u, 'test0')
-commands.unbundle(u, repo, BUNDLEPATH, update=True)
+repo = hg.repository(u, b'test0')
+commands.unbundle(u, repo, pycompat.fsencode(BUNDLEPATH), update=True)
# wait a bit, or the status call wont update the dirstate
time.sleep(1)
@@ -42,7 +43,7 @@
# dereference links as if a Samba server has exported this to a
# Windows client
-for f in 'test0/a.lnk', 'test0/d/b.lnk':
+for f in b'test0/a.lnk', b'test0/d/b.lnk':
os.unlink(f)
fp = open(f, 'wb')
fp.write(util.readfile(f[:-4]))
@@ -50,11 +51,11 @@
# reload repository
u = uimod.ui.load()
-repo = hg.repository(u, 'test0')
+repo = hg.repository(u, b'test0')
commands.status(u, repo)
# try unbundling a repo which contains symlinks
u = uimod.ui.load()
-repo = hg.repository(u, 'test1', create=True)
-commands.unbundle(u, repo, BUNDLEPATH, update=True)
+repo = hg.repository(u, b'test1', create=True)
+commands.unbundle(u, repo, pycompat.fsencode(BUNDLEPATH), update=True)
--- a/tests/test-symlink-placeholder.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-symlink-placeholder.t Wed Jun 06 13:31:24 2018 -0400
@@ -50,13 +50,13 @@
Write binary data to the placeholder:
- >>> open('b', 'w').write('this is a binary\0')
+ >>> open('b', 'w').write('this is a binary\0') and None
$ hg --config extensions.n=$TESTTMP/nolink.py st --debug
ignoring suspect symlink placeholder "b"
Write a long string to the placeholder:
- >>> open('b', 'w').write('this' * 1000)
+ >>> open('b', 'w').write('this' * 1000) and None
$ hg --config extensions.n=$TESTTMP/nolink.py st --debug
ignoring suspect symlink placeholder "b"
@@ -68,7 +68,7 @@
Write a valid string to the placeholder:
- >>> open('b', 'w').write('this')
+ >>> open('b', 'w').write('this') and None
$ hg --config extensions.n=$TESTTMP/nolink.py st --debug
M b
$ hg --config extensions.n=$TESTTMP/nolink.py ci -m1
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-template-filters.t Wed Jun 06 13:31:24 2018 -0400
@@ -0,0 +1,23 @@
+ $ hg debugtemplate '{""|splitlines|commonprefix}\n'
+
+ $ hg debugtemplate '{"foo/bar\nfoo/baz\nfoo/foobar\n"|splitlines|commonprefix}\n'
+ foo
+ $ hg debugtemplate '{"foo/bar\nfoo/bar\n"|splitlines|commonprefix}\n'
+ foo
+ $ hg debugtemplate '{"/foo/bar\n/foo/bar\n"|splitlines|commonprefix}\n'
+ foo
+ $ hg debugtemplate '{"/foo\n/foo\n"|splitlines|commonprefix}\n'
+
+ $ hg debugtemplate '{"foo/bar\nbar/baz"|splitlines|commonprefix}\n'
+
+ $ hg debugtemplate '{"foo/bar\nbar/baz\nbar/foo\n"|splitlines|commonprefix}\n'
+
+ $ hg debugtemplate '{"foo/../bar\nfoo/bar"|splitlines|commonprefix}\n'
+ foo
+ $ hg debugtemplate '{"foo\n/foo"|splitlines|commonprefix}\n'
+
+ $ hg init
+ $ hg log -r null -T '{rev|commonprefix}'
+ hg: parse error: argument is not a list of text
+ (template filter 'commonprefix' is not compatible with keyword 'rev')
+ [255]
--- a/tests/test-transplant.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-transplant.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,3 @@
-#require killdaemons
-
$ cat <<EOF >> $HGRCPATH
> [extensions]
> transplant=
--- a/tests/test-treediscovery-legacy.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-treediscovery-legacy.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,3 @@
-#require killdaemons
-
Tests discovery against servers without getbundle support:
$ cat >> $HGRCPATH <<EOF
@@ -356,6 +354,7 @@
pulling from http://localhost:$HGPORT/
searching for changes
no changes found
+ 1 changesets became public
$ hg push $remote
pushing to http://localhost:$HGPORT/
searching for changes
--- a/tests/test-treediscovery.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-treediscovery.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,3 @@
-#require killdaemons
-
Tests discovery against servers without getbundle support:
$ CAP="getbundle bundle2"
--- a/tests/test-treemanifest.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-treemanifest.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,3 @@
-#require killdaemons
-
$ cat << EOF >> $HGRCPATH
> [ui]
> ssh=$PYTHON "$TESTDIR/dummyssh"
--- a/tests/test-ui-color.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-ui-color.py Wed Jun 06 13:31:24 2018 -0400
@@ -5,6 +5,9 @@
dispatch,
ui as uimod,
)
+from mercurial.utils import (
+ stringutil,
+)
# ensure errors aren't buffered
testui = uimod.ui()
@@ -12,7 +15,7 @@
testui.write((b'buffered\n'))
testui.warn((b'warning\n'))
testui.write_err(b'error\n')
-print(repr(testui.popbuffer()))
+print(stringutil.pprint(testui.popbuffer(), bprefix=True).decode('ascii'))
# test dispatch.dispatch with the same ui object
hgrc = open(os.environ["HGRCPATH"], 'wb')
@@ -35,4 +38,3 @@
print("colored? %s" % (ui_._colormode is not None))
runcmd()
print("colored? %s" % (ui_._colormode is not None))
-
--- a/tests/test-ui-color.py.out Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-ui-color.py.out Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,5 @@
warning
error
-'buffered\n'
+b'buffered\n'
colored? True
colored? True
--- a/tests/test-ui-config.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-ui-config.py Wed Jun 06 13:31:24 2018 -0400
@@ -2,8 +2,12 @@
from mercurial import (
dispatch,
error,
+ pycompat,
ui as uimod,
)
+from mercurial.utils import (
+ stringutil,
+)
testui = uimod.ui.load()
@@ -46,59 +50,62 @@
b'date.invalid=0'
])
-print(repr(testui.configitems(b'values')))
-print(repr(testui.configitems(b'lists')))
+def pprint(obj):
+ return stringutil.pprint(obj).decode('ascii')
+
+print(pprint(testui.configitems(b'values')))
+print(pprint(testui.configitems(b'lists')))
print("---")
-print(repr(testui.config(b'values', b'string')))
-print(repr(testui.config(b'values', b'bool1')))
-print(repr(testui.config(b'values', b'bool2')))
-print(repr(testui.config(b'values', b'unknown')))
+print(pprint(testui.config(b'values', b'string')))
+print(pprint(testui.config(b'values', b'bool1')))
+print(pprint(testui.config(b'values', b'bool2')))
+print(pprint(testui.config(b'values', b'unknown')))
print("---")
try:
- print(repr(testui.configbool(b'values', b'string')))
+ print(pprint(testui.configbool(b'values', b'string')))
except error.ConfigError as inst:
- print(inst)
-print(repr(testui.configbool(b'values', b'bool1')))
-print(repr(testui.configbool(b'values', b'bool2')))
-print(repr(testui.configbool(b'values', b'bool2', True)))
-print(repr(testui.configbool(b'values', b'unknown')))
-print(repr(testui.configbool(b'values', b'unknown', True)))
+ print(pprint(pycompat.bytestr(inst)))
+print(pprint(testui.configbool(b'values', b'bool1')))
+print(pprint(testui.configbool(b'values', b'bool2')))
+print(pprint(testui.configbool(b'values', b'bool2', True)))
+print(pprint(testui.configbool(b'values', b'unknown')))
+print(pprint(testui.configbool(b'values', b'unknown', True)))
print("---")
-print(repr(testui.configint(b'values', b'int1')))
-print(repr(testui.configint(b'values', b'int2')))
+print(pprint(testui.configint(b'values', b'int1')))
+print(pprint(testui.configint(b'values', b'int2')))
print("---")
-print(repr(testui.configlist(b'lists', b'list1')))
-print(repr(testui.configlist(b'lists', b'list2')))
-print(repr(testui.configlist(b'lists', b'list3')))
-print(repr(testui.configlist(b'lists', b'list4')))
-print(repr(testui.configlist(b'lists', b'list4', [b'foo'])))
-print(repr(testui.configlist(b'lists', b'list5')))
-print(repr(testui.configlist(b'lists', b'list6')))
-print(repr(testui.configlist(b'lists', b'list7')))
-print(repr(testui.configlist(b'lists', b'list8')))
-print(repr(testui.configlist(b'lists', b'list9')))
-print(repr(testui.configlist(b'lists', b'list10')))
-print(repr(testui.configlist(b'lists', b'list11')))
-print(repr(testui.configlist(b'lists', b'list12')))
-print(repr(testui.configlist(b'lists', b'list13')))
-print(repr(testui.configlist(b'lists', b'list14')))
-print(repr(testui.configlist(b'lists', b'list15')))
-print(repr(testui.configlist(b'lists', b'list16')))
-print(repr(testui.configlist(b'lists', b'list17')))
-print(repr(testui.configlist(b'lists', b'list18')))
-print(repr(testui.configlist(b'lists', b'unknown')))
-print(repr(testui.configlist(b'lists', b'unknown', b'')))
-print(repr(testui.configlist(b'lists', b'unknown', b'foo')))
-print(repr(testui.configlist(b'lists', b'unknown', [b'foo'])))
-print(repr(testui.configlist(b'lists', b'unknown', b'foo bar')))
-print(repr(testui.configlist(b'lists', b'unknown', b'foo, bar')))
-print(repr(testui.configlist(b'lists', b'unknown', [b'foo bar'])))
-print(repr(testui.configlist(b'lists', b'unknown', [b'foo', b'bar'])))
+print(pprint(testui.configlist(b'lists', b'list1')))
+print(pprint(testui.configlist(b'lists', b'list2')))
+print(pprint(testui.configlist(b'lists', b'list3')))
+print(pprint(testui.configlist(b'lists', b'list4')))
+print(pprint(testui.configlist(b'lists', b'list4', [b'foo'])))
+print(pprint(testui.configlist(b'lists', b'list5')))
+print(pprint(testui.configlist(b'lists', b'list6')))
+print(pprint(testui.configlist(b'lists', b'list7')))
+print(pprint(testui.configlist(b'lists', b'list8')))
+print(pprint(testui.configlist(b'lists', b'list9')))
+print(pprint(testui.configlist(b'lists', b'list10')))
+print(pprint(testui.configlist(b'lists', b'list11')))
+print(pprint(testui.configlist(b'lists', b'list12')))
+print(pprint(testui.configlist(b'lists', b'list13')))
+print(pprint(testui.configlist(b'lists', b'list14')))
+print(pprint(testui.configlist(b'lists', b'list15')))
+print(pprint(testui.configlist(b'lists', b'list16')))
+print(pprint(testui.configlist(b'lists', b'list17')))
+print(pprint(testui.configlist(b'lists', b'list18')))
+print(pprint(testui.configlist(b'lists', b'unknown')))
+print(pprint(testui.configlist(b'lists', b'unknown', b'')))
+print(pprint(testui.configlist(b'lists', b'unknown', b'foo')))
+print(pprint(testui.configlist(b'lists', b'unknown', [b'foo'])))
+print(pprint(testui.configlist(b'lists', b'unknown', b'foo bar')))
+print(pprint(testui.configlist(b'lists', b'unknown', b'foo, bar')))
+print(pprint(testui.configlist(b'lists', b'unknown', [b'foo bar'])))
+print(pprint(testui.configlist(b'lists', b'unknown', [b'foo', b'bar'])))
print("---")
-print(repr(testui.configdate(b'date', b'epoch')))
-print(repr(testui.configdate(b'date', b'birth')))
+print(pprint(testui.configdate(b'date', b'epoch')))
+print(pprint(testui.configdate(b'date', b'birth')))
-print(repr(testui.config(b'values', b'String')))
+print(pprint(testui.config(b'values', b'String')))
def function():
pass
--- a/tests/test-ui-config.py.out Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-ui-config.py.out Wed Jun 06 13:31:24 2018 -0400
@@ -6,7 +6,7 @@
'false'
None
---
-values.string is not a boolean ('string value')
+'values.string is not a boolean (\'string value\')'
True
False
False
--- a/tests/test-ui-verbosity.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-ui-verbosity.py Wed Jun 06 13:31:24 2018 -0400
@@ -38,9 +38,9 @@
u = uimod.ui.load()
if cmd_quiet or cmd_debug or cmd_verbose:
- u.setconfig('ui', 'quiet', str(bool(cmd_quiet)))
- u.setconfig('ui', 'verbose', str(bool(cmd_verbose)))
- u.setconfig('ui', 'debug', str(bool(cmd_debug)))
+ u.setconfig(b'ui', b'quiet', pycompat.bytestr(bool(cmd_quiet)))
+ u.setconfig(b'ui', b'verbose', pycompat.bytestr(bool(cmd_verbose)))
+ u.setconfig(b'ui', b'debug', pycompat.bytestr(bool(cmd_debug)))
check = ''
if u.debugflag:
--- a/tests/test-unbundlehash.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-unbundlehash.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,3 @@
-#require killdaemons
-
Test wire protocol unbundle with hashed heads (capability: unbundlehash)
$ cat << EOF >> $HGRCPATH
--- a/tests/test-unified-test.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-unified-test.t Wed Jun 06 13:31:24 2018 -0400
@@ -84,7 +84,7 @@
foo/bar\r (no-eol) (esc)
#endif
$ printf 'foo/bar\rfoo/bar\r'
- foo.bar\r \(no-eol\) (re) (esc)
+ foo.bar\r [(]no-eol[)] (re) (esc)
foo.bar\r \(no-eol\) (re)
testing hghave
--- a/tests/test-update-branches.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-update-branches.t Wed Jun 06 13:31:24 2018 -0400
@@ -516,11 +516,33 @@
$ hg bookmarks
* bm 5:ff252e8273df
+Test that we abort before we warn about the hidden commit if the working
+directory is dirty
+ $ echo conflict > a
+ $ hg up --hidden 3
+ abort: uncommitted changes
+ (commit or update --clean to discard changes)
+ [255]
+
+Test that we still warn also when there are conflicts
+ $ hg up -m --hidden 3
+ merging a
+ warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges
+ (leaving bookmark bm)
+ updated to hidden changeset 6efa171f091b
+ (hidden revision '6efa171f091b' was rewritten as: d047485b3896)
+ [1]
+ $ hg revert -r . a
+ $ hg resolve -m
+ (no more unresolved files)
+
Test that 4 is detected as the no-argument destination from 3 and also moves
the bookmark with it
$ hg up --quiet 0 # we should be able to update to 3 directly
$ hg up --quiet --hidden 3 # but not implemented yet.
- updating to a hidden changeset 6efa171f091b
+ updated to hidden changeset 6efa171f091b
(hidden revision '6efa171f091b' was rewritten as: d047485b3896)
$ hg book -f bm
$ hg up
@@ -532,7 +554,7 @@
Test that 5 is detected as a valid destination from 1
$ hg up --quiet 0 # we should be able to update to 3 directly
$ hg up --quiet --hidden 3 # but not implemented yet.
- updating to a hidden changeset 6efa171f091b
+ updated to hidden changeset 6efa171f091b
(hidden revision '6efa171f091b' was rewritten as: d047485b3896)
$ hg up 5
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-url.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-url.py Wed Jun 06 13:31:24 2018 -0400
@@ -20,17 +20,17 @@
check(_verifycert(cert('example.com'), 'example.com'),
None)
check(_verifycert(cert('example.com'), 'www.example.com'),
- 'certificate is for example.com')
+ b'certificate is for example.com')
check(_verifycert(cert('www.example.com'), 'example.com'),
- 'certificate is for www.example.com')
+ b'certificate is for www.example.com')
# Test wildcard certificates
check(_verifycert(cert('*.example.com'), 'www.example.com'),
None)
check(_verifycert(cert('*.example.com'), 'example.com'),
- 'certificate is for *.example.com')
+ b'certificate is for *.example.com')
check(_verifycert(cert('*.example.com'), 'w.w.example.com'),
- 'certificate is for *.example.com')
+ b'certificate is for *.example.com')
# Test subjectAltName
san_cert = {'subject': ((('commonName', 'example.com'),),),
@@ -42,7 +42,7 @@
None)
# no fallback to subject commonName when subjectAltName has DNS
check(_verifycert(san_cert, 'example.com'),
- 'certificate is for *.example.net, example.net')
+ b'certificate is for *.example.net, example.net')
# fallback to subject commonName when no DNS in subjectAltName
san_cert = {'subject': ((('commonName', 'example.com'),),),
'subjectAltName': (('IP Address', '8.8.8.8'),)}
@@ -50,84 +50,84 @@
# Avoid some pitfalls
check(_verifycert(cert('*.foo'), 'foo'),
- 'certificate is for *.foo')
+ b'certificate is for *.foo')
check(_verifycert(cert('*o'), 'foo'), None)
check(_verifycert({'subject': ()},
'example.com'),
- 'no commonName or subjectAltName found in certificate')
+ b'no commonName or subjectAltName found in certificate')
check(_verifycert(None, 'example.com'),
- 'no certificate received')
+ b'no certificate received')
# Unicode (IDN) certname isn't supported
check(_verifycert(cert(u'\u4f8b.jp'), 'example.jp'),
- 'IDN in certificate not supported')
+ b'IDN in certificate not supported')
# The following tests are from CPython's test_ssl.py.
check(_verifycert(cert('example.com'), 'example.com'), None)
check(_verifycert(cert('example.com'), 'ExAmple.cOm'), None)
check(_verifycert(cert('example.com'), 'www.example.com'),
- 'certificate is for example.com')
+ b'certificate is for example.com')
check(_verifycert(cert('example.com'), '.example.com'),
- 'certificate is for example.com')
+ b'certificate is for example.com')
check(_verifycert(cert('example.com'), 'example.org'),
- 'certificate is for example.com')
+ b'certificate is for example.com')
check(_verifycert(cert('example.com'), 'exampleXcom'),
- 'certificate is for example.com')
+ b'certificate is for example.com')
check(_verifycert(cert('*.a.com'), 'foo.a.com'), None)
check(_verifycert(cert('*.a.com'), 'bar.foo.a.com'),
- 'certificate is for *.a.com')
+ b'certificate is for *.a.com')
check(_verifycert(cert('*.a.com'), 'a.com'),
- 'certificate is for *.a.com')
+ b'certificate is for *.a.com')
check(_verifycert(cert('*.a.com'), 'Xa.com'),
- 'certificate is for *.a.com')
+ b'certificate is for *.a.com')
check(_verifycert(cert('*.a.com'), '.a.com'),
- 'certificate is for *.a.com')
+ b'certificate is for *.a.com')
# only match one left-most wildcard
check(_verifycert(cert('f*.com'), 'foo.com'), None)
check(_verifycert(cert('f*.com'), 'f.com'), None)
check(_verifycert(cert('f*.com'), 'bar.com'),
- 'certificate is for f*.com')
+ b'certificate is for f*.com')
check(_verifycert(cert('f*.com'), 'foo.a.com'),
- 'certificate is for f*.com')
+ b'certificate is for f*.com')
check(_verifycert(cert('f*.com'), 'bar.foo.com'),
- 'certificate is for f*.com')
+ b'certificate is for f*.com')
# NULL bytes are bad, CVE-2013-4073
check(_verifycert(cert('null.python.org\x00example.org'),
'null.python.org\x00example.org'), None)
check(_verifycert(cert('null.python.org\x00example.org'),
'example.org'),
- 'certificate is for null.python.org\x00example.org')
+ b'certificate is for null.python.org\x00example.org')
check(_verifycert(cert('null.python.org\x00example.org'),
'null.python.org'),
- 'certificate is for null.python.org\x00example.org')
+ b'certificate is for null.python.org\x00example.org')
# error cases with wildcards
check(_verifycert(cert('*.*.a.com'), 'bar.foo.a.com'),
- 'certificate is for *.*.a.com')
+ b'certificate is for *.*.a.com')
check(_verifycert(cert('*.*.a.com'), 'a.com'),
- 'certificate is for *.*.a.com')
+ b'certificate is for *.*.a.com')
check(_verifycert(cert('*.*.a.com'), 'Xa.com'),
- 'certificate is for *.*.a.com')
+ b'certificate is for *.*.a.com')
check(_verifycert(cert('*.*.a.com'), '.a.com'),
- 'certificate is for *.*.a.com')
+ b'certificate is for *.*.a.com')
check(_verifycert(cert('a.*.com'), 'a.foo.com'),
- 'certificate is for a.*.com')
+ b'certificate is for a.*.com')
check(_verifycert(cert('a.*.com'), 'a..com'),
- 'certificate is for a.*.com')
+ b'certificate is for a.*.com')
check(_verifycert(cert('a.*.com'), 'a.com'),
- 'certificate is for a.*.com')
+ b'certificate is for a.*.com')
# wildcard doesn't match IDNA prefix 'xn--'
idna = u'püthon.python.org'.encode('idna').decode('ascii')
check(_verifycert(cert(idna), idna), None)
check(_verifycert(cert('x*.python.org'), idna),
- 'certificate is for x*.python.org')
+ b'certificate is for x*.python.org')
check(_verifycert(cert('xn--p*.python.org'), idna),
- 'certificate is for xn--p*.python.org')
+ b'certificate is for xn--p*.python.org')
# wildcard in first fragment and IDNA A-labels in sequent fragments
# are supported.
@@ -140,10 +140,10 @@
None)
check(_verifycert(cert(idna),
u'ftp.pythön.org'.encode('idna').decode('ascii')),
- 'certificate is for www*.xn--pythn-mua.org')
+ b'certificate is for www*.xn--pythn-mua.org')
check(_verifycert(cert(idna),
u'pythön.org'.encode('idna').decode('ascii')),
- 'certificate is for www*.xn--pythn-mua.org')
+ b'certificate is for www*.xn--pythn-mua.org')
c = {
'notAfter': 'Jun 26 21:41:46 2011 GMT',
@@ -158,10 +158,10 @@
check(_verifycert(c, 'linuxfr.com'), None)
# Not a "DNS" entry
check(_verifycert(c, '<unsupported>'),
- 'certificate is for linuxfr.org, linuxfr.com')
+ b'certificate is for linuxfr.org, linuxfr.com')
# When there is a subjectAltName, commonName isn't used
check(_verifycert(c, 'linuxfrz.org'),
- 'certificate is for linuxfr.org, linuxfr.com')
+ b'certificate is for linuxfr.org, linuxfr.com')
# A pristine real-world example
c = {
@@ -175,10 +175,10 @@
),
}
check(_verifycert(c, 'mail.google.com'), None)
-check(_verifycert(c, 'gmail.com'), 'certificate is for mail.google.com')
+check(_verifycert(c, 'gmail.com'), b'certificate is for mail.google.com')
# Only commonName is considered
-check(_verifycert(c, 'California'), 'certificate is for mail.google.com')
+check(_verifycert(c, 'California'), b'certificate is for mail.google.com')
# Neither commonName nor subjectAltName
c = {
@@ -191,7 +191,7 @@
),
}
check(_verifycert(c, 'mail.google.com'),
- 'no commonName or subjectAltName found in certificate')
+ b'no commonName or subjectAltName found in certificate')
# No DNS entry in subjectAltName but a commonName
c = {
@@ -218,25 +218,27 @@
'subjectAltName': (('othername', 'blabla'),),
}
check(_verifycert(c, 'google.com'),
- 'no commonName or subjectAltName found in certificate')
+ b'no commonName or subjectAltName found in certificate')
# Empty cert / no cert
-check(_verifycert(None, 'example.com'), 'no certificate received')
-check(_verifycert({}, 'example.com'), 'no certificate received')
+check(_verifycert(None, 'example.com'), b'no certificate received')
+check(_verifycert({}, 'example.com'), b'no certificate received')
# avoid denials of service by refusing more than one
# wildcard per fragment.
check(_verifycert({'subject': (((u'commonName', u'a*b.com'),),)},
'axxb.com'), None)
check(_verifycert({'subject': (((u'commonName', u'a*b.co*'),),)},
- 'axxb.com'), 'certificate is for a*b.co*')
+ 'axxb.com'), b'certificate is for a*b.co*')
check(_verifycert({'subject': (((u'commonName', u'a*b*.com'),),)},
'axxbxxc.com'),
- 'too many wildcards in certificate DNS name: a*b*.com')
+ b'too many wildcards in certificate DNS name: a*b*.com')
def test_url():
"""
+ >>> from mercurial import error, pycompat
>>> from mercurial.util import url
+ >>> from mercurial.utils.stringutil import forcebytestr
This tests for edge cases in url.URL's parsing algorithm. Most of
these aren't useful for documentation purposes, so they aren't
@@ -244,119 +246,120 @@
Query strings and fragments:
- >>> url('http://host/a?b#c')
+ >>> url(b'http://host/a?b#c')
<url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
- >>> url('http://host/a?')
+ >>> url(b'http://host/a?')
<url scheme: 'http', host: 'host', path: 'a'>
- >>> url('http://host/a#b#c')
+ >>> url(b'http://host/a#b#c')
<url scheme: 'http', host: 'host', path: 'a', fragment: 'b#c'>
- >>> url('http://host/a#b?c')
+ >>> url(b'http://host/a#b?c')
<url scheme: 'http', host: 'host', path: 'a', fragment: 'b?c'>
- >>> url('http://host/?a#b')
+ >>> url(b'http://host/?a#b')
<url scheme: 'http', host: 'host', path: '', query: 'a', fragment: 'b'>
- >>> url('http://host/?a#b', parsequery=False)
+ >>> url(b'http://host/?a#b', parsequery=False)
<url scheme: 'http', host: 'host', path: '?a', fragment: 'b'>
- >>> url('http://host/?a#b', parsefragment=False)
+ >>> url(b'http://host/?a#b', parsefragment=False)
<url scheme: 'http', host: 'host', path: '', query: 'a#b'>
- >>> url('http://host/?a#b', parsequery=False, parsefragment=False)
+ >>> url(b'http://host/?a#b', parsequery=False, parsefragment=False)
<url scheme: 'http', host: 'host', path: '?a#b'>
IPv6 addresses:
- >>> url('ldap://[2001:db8::7]/c=GB?objectClass?one')
+ >>> url(b'ldap://[2001:db8::7]/c=GB?objectClass?one')
<url scheme: 'ldap', host: '[2001:db8::7]', path: 'c=GB',
query: 'objectClass?one'>
- >>> url('ldap://joe:xxx@[2001:db8::7]:80/c=GB?objectClass?one')
+ >>> url(b'ldap://joe:xxx@[2001:db8::7]:80/c=GB?objectClass?one')
<url scheme: 'ldap', user: 'joe', passwd: 'xxx', host: '[2001:db8::7]',
port: '80', path: 'c=GB', query: 'objectClass?one'>
Missing scheme, host, etc.:
- >>> url('://192.0.2.16:80/')
+ >>> url(b'://192.0.2.16:80/')
<url path: '://192.0.2.16:80/'>
- >>> url('https://mercurial-scm.org')
+ >>> url(b'https://mercurial-scm.org')
<url scheme: 'https', host: 'mercurial-scm.org'>
- >>> url('/foo')
+ >>> url(b'/foo')
<url path: '/foo'>
- >>> url('bundle:/foo')
+ >>> url(b'bundle:/foo')
<url scheme: 'bundle', path: '/foo'>
- >>> url('a?b#c')
+ >>> url(b'a?b#c')
<url path: 'a?b', fragment: 'c'>
- >>> url('http://x.com?arg=/foo')
+ >>> url(b'http://x.com?arg=/foo')
<url scheme: 'http', host: 'x.com', query: 'arg=/foo'>
- >>> url('http://joe:xxx@/foo')
+ >>> url(b'http://joe:xxx@/foo')
<url scheme: 'http', user: 'joe', passwd: 'xxx', path: 'foo'>
Just a scheme and a path:
- >>> url('mailto:John.Doe@example.com')
+ >>> url(b'mailto:John.Doe@example.com')
<url scheme: 'mailto', path: 'John.Doe@example.com'>
- >>> url('a:b:c:d')
+ >>> url(b'a:b:c:d')
<url path: 'a:b:c:d'>
- >>> url('aa:bb:cc:dd')
+ >>> url(b'aa:bb:cc:dd')
<url scheme: 'aa', path: 'bb:cc:dd'>
SSH examples:
- >>> url('ssh://joe@host//home/joe')
+ >>> url(b'ssh://joe@host//home/joe')
<url scheme: 'ssh', user: 'joe', host: 'host', path: '/home/joe'>
- >>> url('ssh://joe:xxx@host/src')
+ >>> url(b'ssh://joe:xxx@host/src')
<url scheme: 'ssh', user: 'joe', passwd: 'xxx', host: 'host', path: 'src'>
- >>> url('ssh://joe:xxx@host')
+ >>> url(b'ssh://joe:xxx@host')
<url scheme: 'ssh', user: 'joe', passwd: 'xxx', host: 'host'>
- >>> url('ssh://joe@host')
+ >>> url(b'ssh://joe@host')
<url scheme: 'ssh', user: 'joe', host: 'host'>
- >>> url('ssh://host')
+ >>> url(b'ssh://host')
<url scheme: 'ssh', host: 'host'>
- >>> url('ssh://')
+ >>> url(b'ssh://')
<url scheme: 'ssh'>
- >>> url('ssh:')
+ >>> url(b'ssh:')
<url scheme: 'ssh'>
Non-numeric port:
- >>> url('http://example.com:dd')
+ >>> url(b'http://example.com:dd')
<url scheme: 'http', host: 'example.com', port: 'dd'>
- >>> url('ssh://joe:xxx@host:ssh/foo')
+ >>> url(b'ssh://joe:xxx@host:ssh/foo')
<url scheme: 'ssh', user: 'joe', passwd: 'xxx', host: 'host', port: 'ssh',
path: 'foo'>
Bad authentication credentials:
- >>> url('http://joe@joeville:123@4:@host/a?b#c')
+ >>> url(b'http://joe@joeville:123@4:@host/a?b#c')
<url scheme: 'http', user: 'joe@joeville', passwd: '123@4:',
host: 'host', path: 'a', query: 'b', fragment: 'c'>
- >>> url('http://!*#?/@!*#?/:@host/a?b#c')
+ >>> url(b'http://!*#?/@!*#?/:@host/a?b#c')
<url scheme: 'http', host: '!*', fragment: '?/@!*#?/:@host/a?b#c'>
- >>> url('http://!*#?@!*#?:@host/a?b#c')
+ >>> url(b'http://!*#?@!*#?:@host/a?b#c')
<url scheme: 'http', host: '!*', fragment: '?@!*#?:@host/a?b#c'>
- >>> url('http://!*@:!*@@host/a?b#c')
+ >>> url(b'http://!*@:!*@@host/a?b#c')
<url scheme: 'http', user: '!*@', passwd: '!*@', host: 'host',
path: 'a', query: 'b', fragment: 'c'>
File paths:
- >>> url('a/b/c/d.g.f')
+ >>> url(b'a/b/c/d.g.f')
<url path: 'a/b/c/d.g.f'>
- >>> url('/x///z/y/')
+ >>> url(b'/x///z/y/')
<url path: '/x///z/y/'>
- >>> url('/foo:bar')
+ >>> url(b'/foo:bar')
<url path: '/foo:bar'>
- >>> url('\\\\foo:bar')
+ >>> url(b'\\\\foo:bar')
<url path: '\\\\foo:bar'>
- >>> url('./foo:bar')
+ >>> url(b'./foo:bar')
<url path: './foo:bar'>
Non-localhost file URL:
- >>> u = url('file://mercurial-scm.org/foo')
- Traceback (most recent call last):
- File "<stdin>", line 1, in ?
- Abort: file:// URLs can only refer to localhost
+ >>> try:
+ ... u = url(b'file://mercurial-scm.org/foo')
+ ... except error.Abort as e:
+ ... forcebytestr(e)
+ 'file:// URLs can only refer to localhost'
Empty URL:
- >>> u = url('')
+ >>> u = url(b'')
>>> u
<url path: ''>
>>> str(u)
@@ -364,54 +367,54 @@
Empty path with query string:
- >>> str(url('http://foo/?bar'))
+ >>> str(url(b'http://foo/?bar'))
'http://foo/?bar'
Invalid path:
- >>> u = url('http://foo/bar')
- >>> u.path = 'bar'
+ >>> u = url(b'http://foo/bar')
+ >>> u.path = b'bar'
>>> str(u)
'http://foo/bar'
- >>> u = url('file:/foo/bar/baz')
+ >>> u = url(b'file:/foo/bar/baz')
>>> u
<url scheme: 'file', path: '/foo/bar/baz'>
>>> str(u)
'file:///foo/bar/baz'
- >>> u.localpath()
+ >>> pycompat.bytestr(u.localpath())
'/foo/bar/baz'
- >>> u = url('file:///foo/bar/baz')
+ >>> u = url(b'file:///foo/bar/baz')
>>> u
<url scheme: 'file', path: '/foo/bar/baz'>
>>> str(u)
'file:///foo/bar/baz'
- >>> u.localpath()
+ >>> pycompat.bytestr(u.localpath())
'/foo/bar/baz'
- >>> u = url('file:///f:oo/bar/baz')
+ >>> u = url(b'file:///f:oo/bar/baz')
>>> u
<url scheme: 'file', path: 'f:oo/bar/baz'>
>>> str(u)
'file:///f:oo/bar/baz'
- >>> u.localpath()
+ >>> pycompat.bytestr(u.localpath())
'f:oo/bar/baz'
- >>> u = url('file://localhost/f:oo/bar/baz')
+ >>> u = url(b'file://localhost/f:oo/bar/baz')
>>> u
<url scheme: 'file', host: 'localhost', path: 'f:oo/bar/baz'>
>>> str(u)
'file://localhost/f:oo/bar/baz'
- >>> u.localpath()
+ >>> pycompat.bytestr(u.localpath())
'f:oo/bar/baz'
- >>> u = url('file:foo/bar/baz')
+ >>> u = url(b'file:foo/bar/baz')
>>> u
<url scheme: 'file', path: 'foo/bar/baz'>
>>> str(u)
'file:foo/bar/baz'
- >>> u.localpath()
+ >>> pycompat.bytestr(u.localpath())
'foo/bar/baz'
"""
--- a/tests/test-walkrepo.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-walkrepo.py Wed Jun 06 13:31:24 2018 -0400
@@ -17,22 +17,22 @@
checklink = util.checklink
u = uimod.ui.load()
-sym = checklink('.')
+sym = checklink(b'.')
-hg.repository(u, 'top1', create=1)
-mkdir('subdir')
-chdir('subdir')
-hg.repository(u, 'sub1', create=1)
-mkdir('subsubdir')
-chdir('subsubdir')
-hg.repository(u, 'subsub1', create=1)
+hg.repository(u, b'top1', create=1)
+mkdir(b'subdir')
+chdir(b'subdir')
+hg.repository(u, b'sub1', create=1)
+mkdir(b'subsubdir')
+chdir(b'subsubdir')
+hg.repository(u, b'subsub1', create=1)
chdir(os.path.pardir)
if sym:
- os.symlink(os.path.pardir, 'circle')
- os.symlink(pjoin('subsubdir', 'subsub1'), 'subsub1')
+ os.symlink(os.path.pardir, b'circle')
+ os.symlink(pjoin(b'subsubdir', b'subsub1'), b'subsub1')
def runtest():
- reposet = frozenset(walkrepos('.', followsym=True))
+ reposet = frozenset(walkrepos(b'.', followsym=True))
if sym and (len(reposet) != 3):
print("reposet = %r" % (reposet,))
print(("Found %d repositories when I should have found 3"
@@ -41,19 +41,19 @@
print("reposet = %r" % (reposet,))
print(("Found %d repositories when I should have found 2"
% (len(reposet),)))
- sub1set = frozenset((pjoin('.', 'sub1'),
- pjoin('.', 'circle', 'subdir', 'sub1')))
+ sub1set = frozenset((pjoin(b'.', b'sub1'),
+ pjoin(b'.', b'circle', b'subdir', b'sub1')))
if len(sub1set & reposet) != 1:
print("sub1set = %r" % (sub1set,))
print("reposet = %r" % (reposet,))
print("sub1set and reposet should have exactly one path in common.")
- sub2set = frozenset((pjoin('.', 'subsub1'),
- pjoin('.', 'subsubdir', 'subsub1')))
+ sub2set = frozenset((pjoin(b'.', b'subsub1'),
+ pjoin(b'.', b'subsubdir', b'subsub1')))
if len(sub2set & reposet) != 1:
print("sub2set = %r" % (sub2set,))
print("reposet = %r" % (reposet,))
print("sub2set and reposet should have exactly one path in common.")
- sub3 = pjoin('.', 'circle', 'top1')
+ sub3 = pjoin(b'.', b'circle', b'top1')
if sym and sub3 not in reposet:
print("reposet = %r" % (reposet,))
print("Symbolic links are supported and %s is not in reposet" % (sub3,))
--- a/tests/test-wireproto-command-capabilities.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-wireproto-command-capabilities.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,3 +1,5 @@
+#require no-chg
+
$ . $TESTDIR/wireprotohelpers.sh
$ hg init server
--- a/tests/test-wireproto.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-wireproto.py Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,7 @@
from __future__ import absolute_import, print_function
+import sys
+
from mercurial import (
error,
pycompat,
@@ -9,6 +11,9 @@
wireprotov1peer,
wireprotov1server,
)
+from mercurial.utils import (
+ stringutil,
+)
stringio = util.stringio
class proto(object):
@@ -92,10 +97,16 @@
srv = serverrepo()
clt = clientpeer(srv, uimod.ui())
-print(clt.greet(b"Foobar"))
+def printb(data, end=b'\n'):
+ out = getattr(sys.stdout, 'buffer', sys.stdout)
+ out.write(data + end)
+ out.flush()
+
+printb(clt.greet(b"Foobar"))
with clt.commandexecutor() as e:
fgreet1 = e.callcommand(b'greet', {b'name': b'Fo, =;:<o'})
fgreet2 = e.callcommand(b'greet', {b'name': b'Bar'})
-print([f.result() for f in (fgreet1, fgreet2)])
+printb(stringutil.pprint([f.result() for f in (fgreet1, fgreet2)],
+ bprefix=True))
--- a/tests/test-wireproto.py.out Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-wireproto.py.out Wed Jun 06 13:31:24 2018 -0400
@@ -1,2 +1,2 @@
Hello, Foobar
-['Hello, Fo, =;:<o', 'Hello, Bar']
+[b'Hello, Fo, =;:<o', b'Hello, Bar']
--- a/tests/test-wireproto.t Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-wireproto.t Wed Jun 06 13:31:24 2018 -0400
@@ -1,5 +1,3 @@
-#require killdaemons
-
Test wire protocol argument passing
Setup repo:
--- a/tests/test-wsgirequest.py Wed Jun 06 13:28:49 2018 -0400
+++ b/tests/test-wsgirequest.py Wed Jun 06 13:31:24 2018 -0400
@@ -206,18 +206,18 @@
"""repository path components get stripped from URL."""
with self.assertRaisesRegex(error.ProgrammingError,
- b'reponame requires PATH_INFO'):
+ 'reponame requires PATH_INFO'):
parse(DEFAULT_ENV, reponame=b'repo')
with self.assertRaisesRegex(error.ProgrammingError,
- b'PATH_INFO does not begin with repo '
- b'name'):
+ 'PATH_INFO does not begin with repo '
+ 'name'):
parse(DEFAULT_ENV, reponame=b'repo', extra={
r'PATH_INFO': r'/pathinfo',
})
with self.assertRaisesRegex(error.ProgrammingError,
- b'reponame prefix of PATH_INFO'):
+ 'reponame prefix of PATH_INFO'):
parse(DEFAULT_ENV, reponame=b'repo', extra={
r'PATH_INFO': r'/repoextra/path',
})
@@ -251,7 +251,7 @@
def testaltbaseurl(self):
# Simple hostname remap.
- r = parse(DEFAULT_ENV, altbaseurl='http://altserver')
+ r = parse(DEFAULT_ENV, altbaseurl=b'http://altserver')
self.assertEqual(r.url, b'http://testserver')
self.assertEqual(r.baseurl, b'http://testserver')
@@ -264,7 +264,7 @@
self.assertIsNone(r.reponame)
# With a custom port.
- r = parse(DEFAULT_ENV, altbaseurl='http://altserver:8000')
+ r = parse(DEFAULT_ENV, altbaseurl=b'http://altserver:8000')
self.assertEqual(r.url, b'http://testserver')
self.assertEqual(r.baseurl, b'http://testserver')
self.assertEqual(r.advertisedurl, b'http://altserver:8000')
@@ -276,7 +276,7 @@
self.assertIsNone(r.reponame)
# With a changed protocol.
- r = parse(DEFAULT_ENV, altbaseurl='https://altserver')
+ r = parse(DEFAULT_ENV, altbaseurl=b'https://altserver')
self.assertEqual(r.url, b'http://testserver')
self.assertEqual(r.baseurl, b'http://testserver')
self.assertEqual(r.advertisedurl, b'https://altserver')
@@ -289,7 +289,7 @@
self.assertIsNone(r.reponame)
# Need to specify explicit port number for proper https:// alt URLs.
- r = parse(DEFAULT_ENV, altbaseurl='https://altserver:443')
+ r = parse(DEFAULT_ENV, altbaseurl=b'https://altserver:443')
self.assertEqual(r.url, b'http://testserver')
self.assertEqual(r.baseurl, b'http://testserver')
self.assertEqual(r.advertisedurl, b'https://altserver')
@@ -301,7 +301,7 @@
self.assertIsNone(r.reponame)
# With only PATH_INFO defined.
- r = parse(DEFAULT_ENV, altbaseurl='http://altserver', extra={
+ r = parse(DEFAULT_ENV, altbaseurl=b'http://altserver', extra={
r'PATH_INFO': r'/path1/path2',
})
self.assertEqual(r.url, b'http://testserver/path1/path2')
@@ -315,7 +315,7 @@
self.assertIsNone(r.reponame)
# Path on alt URL.
- r = parse(DEFAULT_ENV, altbaseurl='http://altserver/altpath')
+ r = parse(DEFAULT_ENV, altbaseurl=b'http://altserver/altpath')
self.assertEqual(r.url, b'http://testserver')
self.assertEqual(r.baseurl, b'http://testserver')
self.assertEqual(r.advertisedurl, b'http://altserver/altpath')
@@ -327,7 +327,7 @@
self.assertIsNone(r.reponame)
# With a trailing slash.
- r = parse(DEFAULT_ENV, altbaseurl='http://altserver/altpath/')
+ r = parse(DEFAULT_ENV, altbaseurl=b'http://altserver/altpath/')
self.assertEqual(r.url, b'http://testserver')
self.assertEqual(r.baseurl, b'http://testserver')
self.assertEqual(r.advertisedurl, b'http://altserver/altpath/')
@@ -339,7 +339,7 @@
self.assertIsNone(r.reponame)
# PATH_INFO + path on alt URL.
- r = parse(DEFAULT_ENV, altbaseurl='http://altserver/altpath', extra={
+ r = parse(DEFAULT_ENV, altbaseurl=b'http://altserver/altpath', extra={
r'PATH_INFO': r'/path1/path2',
})
self.assertEqual(r.url, b'http://testserver/path1/path2')
@@ -354,7 +354,7 @@
self.assertIsNone(r.reponame)
# PATH_INFO + path on alt URL with trailing slash.
- r = parse(DEFAULT_ENV, altbaseurl='http://altserver/altpath/', extra={
+ r = parse(DEFAULT_ENV, altbaseurl=b'http://altserver/altpath/', extra={
r'PATH_INFO': r'/path1/path2',
})
self.assertEqual(r.url, b'http://testserver/path1/path2')
@@ -369,7 +369,7 @@
self.assertIsNone(r.reponame)
# Local SCRIPT_NAME is ignored.
- r = parse(DEFAULT_ENV, altbaseurl='http://altserver', extra={
+ r = parse(DEFAULT_ENV, altbaseurl=b'http://altserver', extra={
r'SCRIPT_NAME': r'/script',
r'PATH_INFO': r'/path1/path2',
})
@@ -384,7 +384,7 @@
self.assertIsNone(r.reponame)
# Use remote's path for script name, app path
- r = parse(DEFAULT_ENV, altbaseurl='http://altserver/altroot', extra={
+ r = parse(DEFAULT_ENV, altbaseurl=b'http://altserver/altroot', extra={
r'SCRIPT_NAME': r'/script',
r'PATH_INFO': r'/path1/path2',
})
@@ -401,7 +401,7 @@
# reponame is factored in properly.
r = parse(DEFAULT_ENV, reponame=b'repo',
- altbaseurl='http://altserver/altroot',
+ altbaseurl=b'http://altserver/altroot',
extra={
r'SCRIPT_NAME': r'/script',
r'PATH_INFO': r'/repo/path1/path2',