--- a/.hgignore Tue Jun 13 22:24:41 2017 -0400
+++ b/.hgignore Tue Jun 20 16:33:46 2017 -0400
@@ -54,14 +54,6 @@
locale/*/LC_MESSAGES/hg.mo
hgext/__index__.py
-# files installed with a local --pure build
-mercurial/base85.py
-mercurial/bdiff.py
-mercurial/diffhelpers.py
-mercurial/mpatch.py
-mercurial/osutil.py
-mercurial/parsers.py
-
# Generated wheels
wheelhouse/
--- a/Makefile Tue Jun 13 22:24:41 2017 -0400
+++ b/Makefile Tue Jun 20 16:33:46 2017 -0400
@@ -64,7 +64,6 @@
-$(PYTHON) setup.py clean --all # ignore errors from this command
find contrib doc hgext hgext3rd i18n mercurial tests \
\( -name '*.py[cdo]' -o -name '*.so' \) -exec rm -f '{}' ';'
- rm -f $(addprefix mercurial/,$(notdir $(wildcard mercurial/pure/[a-z]*.py)))
rm -f MANIFEST MANIFEST.in hgext/__index__.py tests/*.err
rm -f mercurial/__modulepolicy__.py
if test -d .hg; then rm -f mercurial/__version__.py; fi
@@ -177,6 +176,14 @@
# location of our own.
install -d build/mercurial/usr/local/hg/contrib/
install -m 0644 contrib/bash_completion build/mercurial/usr/local/hg/contrib/hg-completion.bash
+ make -C contrib/chg \
+ HGPATH=/usr/local/bin/hg \
+ PYTHON=/usr/bin/python2.7 \
+ HG=/usr/local/bin/hg \
+ HGEXTDIR=/Library/Python/2.7/site-packages/hgext \
+ DESTDIR=../../build/mercurial \
+ PREFIX=/usr/local \
+ clean install
mkdir -p $${OUTPUTDIR:-dist}
HGVER=$$((cat build/mercurial/Library/Python/2.7/site-packages/mercurial/__version__.py; echo 'print(version)') | python) && \
OSXVER=$$(sw_vers -productVersion | cut -d. -f1,2) && \
--- a/contrib/bdiff-torture.py Tue Jun 13 22:24:41 2017 -0400
+++ b/contrib/bdiff-torture.py Tue Jun 20 16:33:46 2017 -0400
@@ -5,8 +5,7 @@
import sys
from mercurial import (
- bdiff,
- mpatch,
+ mdiff,
)
def reducetest(a, b):
@@ -42,10 +41,10 @@
sys.exit(0)
def test1(a, b):
- d = bdiff.bdiff(a, b)
+ d = mdiff.textdiff(a, b)
if not d:
raise ValueError("empty")
- c = mpatch.patches(a, [d])
+ c = mdiff.patches(a, [d])
if c != b:
raise ValueError("bad")
--- a/contrib/check-code.py Tue Jun 13 22:24:41 2017 -0400
+++ b/contrib/check-code.py Tue Jun 20 16:33:46 2017 -0400
@@ -116,6 +116,7 @@
(r'printf.*[^\\]\\x', "don't use printf \\x, use Python"),
(r'\$\(.*\)', "don't use $(expr), use `expr`"),
(r'rm -rf \*', "don't use naked rm -rf, target a directory"),
+ (r'\[[^\]]+==', '[ foo == bar ] is a bashism, use [ foo = bar ] instead'),
(r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
"use egrep for extended grep syntax"),
(r'/bin/', "don't use explicit paths for tools"),
@@ -137,6 +138,7 @@
"put a backslash-escaped newline after sed 'i' command"),
(r'^diff *-\w*[uU].*$\n(^ \$ |^$)', "prefix diff -u/-U with cmp"),
(r'^\s+(if)? diff *-\w*[uU]', "prefix diff -u/-U with cmp"),
+ (r'python\s.+\.py', "don't use 'python', use '$PYTHON'"),
(r'seq ', "don't use 'seq', use $TESTDIR/seq.py"),
(r'\butil\.Abort\b', "directly use error.Abort"),
(r'\|&', "don't use |&, use 2>&1"),
@@ -144,6 +146,7 @@
(r'\bsed\b.*[^\\]\\n', "don't use 'sed ... \\n', use a \\ and a newline"),
(r'env.*-u', "don't use 'env -u VAR', use 'unset VAR'"),
(r'cp.* -r ', "don't use 'cp -r', use 'cp -R'"),
+ (r'grep.* -[ABC] ', "don't use grep's context flags"),
],
# warnings
[
@@ -299,7 +302,8 @@
(r'^\s*(while|if) [01]:',
"use True/False for constant Boolean expression"),
(r'(?:(?<!def)\s+|\()hasattr\(',
- 'hasattr(foo, bar) is broken, use util.safehasattr(foo, bar) instead'),
+ 'hasattr(foo, bar) is broken on py2, use util.safehasattr(foo, bar) '
+ 'instead', r'#.*hasattr-py3-only'),
(r'opener\([^)]*\).read\(',
"use opener.read() instead"),
(r'opener\([^)]*\).write\(',
@@ -338,6 +342,8 @@
(r'^import pickle', "don't use pickle, use util.pickle"),
(r'^import httplib', "don't use httplib, use util.httplib"),
(r'^import BaseHTTPServer', "use util.httpserver instead"),
+ (r'^(from|import) mercurial\.(cext|pure|cffi)',
+ "use mercurial.policy.importmod instead"),
(r'\.next\(\)', "don't use .next(), use next(...)"),
(r'([a-z]*).revision\(\1\.node\(',
"don't convert rev to node before passing to revision(nodeorrev)"),
@@ -474,7 +480,7 @@
py3pats = [
[
- (r'os\.environ', "use encoding.environ instead (py3)"),
+ (r'os\.environ', "use encoding.environ instead (py3)", r'#.*re-exports'),
(r'os\.name', "use pycompat.osname instead (py3)"),
(r'os\.getcwd', "use pycompat.getcwd instead (py3)"),
(r'os\.sep', "use pycompat.ossep instead (py3)"),
@@ -492,8 +498,8 @@
checks = [
('python', r'.*\.(py|cgi)$', r'^#!.*python', pyfilters, pypats),
('python', r'.*hgext.*\.py$', '', [], pyextnfpats),
- ('python 3', r'.*(hgext|mercurial).*(?<!pycompat)\.py', '',
- pyfilters, py3pats),
+ ('python 3', r'.*(hgext|mercurial)/(?!demandimport|policy|pycompat).*\.py',
+ '', pyfilters, py3pats),
('test script', r'(.*/)?test-[^.~]*$', '', testfilters, testpats),
('c', r'.*\.[ch]$', '', cfilters, cpats),
('unified test', r'.*\.t$', '', utestfilters, utestpats),
--- a/contrib/check-config.py Tue Jun 13 22:24:41 2017 -0400
+++ b/contrib/check-config.py Tue Jun 20 16:33:46 2017 -0400
@@ -14,8 +14,26 @@
foundopts = {}
documented = {}
-configre = (r"""ui\.config(|int|bool|list)\(['"](\S+)['"],\s*"""
- r"""['"](\S+)['"](,\s+(?:default=)?(\S+?))?\)""")
+configre = re.compile(r'''
+ # Function call
+ ui\.config(?P<ctype>|int|bool|list)\(
+ # First argument.
+ ['"](?P<section>\S+)['"],\s*
+ # Second argument
+ ['"](?P<option>\S+)['"](,\s+
+ (?:default=)?(?P<default>\S+?))?
+ \)''', re.VERBOSE | re.MULTILINE)
+
+configwithre = re.compile(r'''
+ ui\.config(?P<ctype>with)\(
+ # First argument is callback function. This doesn't parse robustly
+ # if it is e.g. a function call.
+ [^,]+,\s*
+ ['"](?P<section>\S+)['"],\s*
+ ['"](?P<option>\S+)['"](,\s+
+ (?:default=)?(?P<default>\S+?))?
+ \)''', re.VERBOSE | re.MULTILINE)
+
configpartialre = (r"""ui\.config""")
def main(args):
@@ -71,13 +89,13 @@
# look for code-like bits
line = carryover + l
- m = re.search(configre, line, re.MULTILINE)
+ m = configre.search(line) or configwithre.search(line)
if m:
- ctype = m.group(1)
+ ctype = m.group('ctype')
if not ctype:
ctype = 'str'
- name = m.group(2) + "." + m.group(3)
- default = m.group(5)
+ name = m.group('section') + "." + m.group('option')
+ default = m.group('default')
if default in (None, 'False', 'None', '0', '[]', '""', "''"):
default = ''
if re.match('[a-z.]+$', default):
--- a/contrib/check-py3-compat.py Tue Jun 13 22:24:41 2017 -0400
+++ b/contrib/check-py3-compat.py Tue Jun 20 16:33:46 2017 -0400
@@ -10,6 +10,7 @@
from __future__ import absolute_import, print_function
import ast
+import importlib
import os
import sys
import traceback
@@ -40,7 +41,6 @@
def check_compat_py3(f):
"""Check Python 3 compatibility of a file with Python 3."""
- import importlib # not available on Python 2.6
with open(f, 'rb') as fh:
content = fh.read()
@@ -51,11 +51,12 @@
return
# Try to import the module.
- # For now we only support mercurial.* and hgext.* modules because figuring
- # out module paths for things not in a package can be confusing.
- if f.startswith(('hgext/', 'mercurial/')) and not f.endswith('__init__.py'):
+ # For now we only support modules in packages because figuring out module
+ # paths for things not in a package can be confusing.
+ if (f.startswith(('hgdemandimport/', 'hgext/', 'mercurial/'))
+ and not f.endswith('__init__.py')):
assert f.endswith('.py')
- name = f.replace('/', '.')[:-3].replace('.pure.', '.')
+ name = f.replace('/', '.')[:-3]
try:
importlib.import_module(name)
except Exception as e:
--- a/contrib/debian/control Tue Jun 13 22:24:41 2017 -0400
+++ b/contrib/debian/control Tue Jun 20 16:33:46 2017 -0400
@@ -13,7 +13,7 @@
unzip,
zip
Standards-Version: 3.9.4
-X-Python-Version: >= 2.6
+X-Python-Version: >= 2.7
Package: mercurial
Depends:
--- a/contrib/debugshell.py Tue Jun 13 22:24:41 2017 -0400
+++ b/contrib/debugshell.py Tue Jun 20 16:33:46 2017 -0400
@@ -6,12 +6,12 @@
import mercurial
import sys
from mercurial import (
- cmdutil,
demandimport,
+ registrar,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
def pdb(ui, repo, msg, **opts):
objects = {
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/editmergeps.bat Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,2 @@
+@echo off
+powershell -NoProfile -ExecutionPolicy unrestricted -Command "& '%~dp0\editmergeps.ps1' %*"
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/editmergeps.ps1 Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,78 @@
+# A simple script for opening merge conflicts in editor
+# A loose translation of contrib/editmerge to powershell
+# Please make sure that both editmergeps.bat and editmergeps.ps1 are available
+# via %PATH% and use the following Mercurial settings to enable it
+#
+# [ui]
+# merge=editmergeps
+# editmergeps.args=$output
+# editmergeps.check=changed
+# editmergeps.premerge=keep
+
+$file=$args[0]
+
+function Get-Lines
+{
+ Select-String "^<<<<<<" $file | % {"$($_.LineNumber)"}
+}
+
+$ed = $Env:HGEDITOR;
+if ($ed -eq $nil)
+{
+ $ed = $Env:VISUAL;
+}
+if ($ed -eq $nil)
+{
+ $ed = $Env:EDITOR;
+}
+if ($ed -eq $nil)
+{
+ $ed = $(hg showconfig ui.editor);
+}
+if ($ed -eq $nil)
+{
+ Write-Error "merge failed - unable to find editor"
+ exit 1
+}
+
+if (($ed -eq "vim") -or ($ed -eq "emacs") -or `
+ ($ed -eq "nano") -or ($ed -eq "notepad++"))
+{
+ $lines = Get-Lines
+ $firstline = if ($lines.Length -gt 0) { $lines[0] } else { $nil }
+ $previousline = $nil;
+
+
+ # open the editor to the first conflict until there are no more
+ # or the user stops editing the file
+ while (($firstline -ne $nil) -and ($firstline -ne $previousline))
+ {
+ if ($ed -eq "notepad++")
+ {
+ $linearg = "-n$firstline"
+ }
+ else
+ {
+ $linearg = "+$firstline"
+ }
+
+ Start-Process -Wait -NoNewWindow $ed $linearg,$file
+ $previousline = $firstline
+ $lines = Get-Lines
+ $firstline = if ($lines.Length -gt 0) { $lines[0] } else { $nil }
+ }
+}
+else
+{
+ & "$ed" $file
+}
+
+$conflicts=Get-Lines
+if ($conflicts.Length -ne 0)
+{
+ Write-Output "merge failed - resolve the conflicts (line $conflicts) then use 'hg resolve --mark'"
+ exit 1
+}
+
+exit 0
+
--- a/contrib/import-checker.py Tue Jun 13 22:24:41 2017 -0400
+++ b/contrib/import-checker.py Tue Jun 20 16:33:46 2017 -0400
@@ -22,6 +22,18 @@
'mercurial.hgweb.request',
'mercurial.i18n',
'mercurial.node',
+ # for cffi modules to re-export pure functions
+ 'mercurial.pure.base85',
+ 'mercurial.pure.bdiff',
+ 'mercurial.pure.diffhelpers',
+ 'mercurial.pure.mpatch',
+ 'mercurial.pure.osutil',
+ 'mercurial.pure.parsers',
+)
+
+# Whitelist of symbols that can be directly imported.
+directsymbols = (
+ 'demandimport',
)
# Modules that must be aliased because they are commonly confused with
@@ -55,13 +67,11 @@
todo.extend(ast.iter_child_nodes(node))
yield node, newscope
-def dotted_name_of_path(path, trimpure=False):
+def dotted_name_of_path(path):
"""Given a relative path to a source file, return its dotted module name.
>>> dotted_name_of_path('mercurial/error.py')
'mercurial.error'
- >>> dotted_name_of_path('mercurial/pure/parsers.py', trimpure=True)
- 'mercurial.parsers'
>>> dotted_name_of_path('zlibmodule.so')
'zlib'
"""
@@ -69,8 +79,6 @@
parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so
if parts[-1].endswith('module'):
parts[-1] = parts[-1][:-6]
- if trimpure:
- return '.'.join(p for p in parts if p != 'pure')
return '.'.join(parts)
def fromlocalfunc(modulename, localmods):
@@ -80,9 +88,8 @@
`modulename` is an `dotted_name_of_path()`-ed source file path,
which may have `.__init__` at the end of it, of the target source.
- `localmods` is a dict (or set), of which key is an absolute
- `dotted_name_of_path()`-ed source file path of locally defined (=
- Mercurial specific) modules.
+ `localmods` is a set of absolute `dotted_name_of_path()`-ed source file
+ paths of locally defined (= Mercurial specific) modules.
This function assumes that module names not existing in
`localmods` are from the Python standard library.
@@ -106,9 +113,9 @@
convenient, even though this is also equivalent to "absname !=
dottednpath")
- >>> localmods = {'foo.__init__': True, 'foo.foo1': True,
- ... 'foo.bar.__init__': True, 'foo.bar.bar1': True,
- ... 'baz.__init__': True, 'baz.baz1': True }
+ >>> localmods = {'foo.__init__', 'foo.foo1',
+ ... 'foo.bar.__init__', 'foo.bar.bar1',
+ ... 'baz.__init__', 'baz.baz1'}
>>> fromlocal = fromlocalfunc('foo.xxx', localmods)
>>> # relative
>>> fromlocal('foo1')
@@ -163,6 +170,16 @@
return False
return fromlocal
+def populateextmods(localmods):
+ """Populate C extension modules based on pure modules"""
+ newlocalmods = set(localmods)
+ for n in localmods:
+ if n.startswith('mercurial.pure.'):
+ m = n[len('mercurial.pure.'):]
+ newlocalmods.add('mercurial.cext.' + m)
+ newlocalmods.add('mercurial.cffi._' + m)
+ return newlocalmods
+
def list_stdlib_modules():
"""List the modules present in the stdlib.
@@ -203,7 +220,7 @@
yield m
for m in ['cffi']:
yield m
- stdlib_prefixes = set([sys.prefix, sys.exec_prefix])
+ stdlib_prefixes = {sys.prefix, sys.exec_prefix}
# We need to supplement the list of prefixes for the search to work
# when run from within a virtualenv.
for mod in (BaseHTTPServer, zlib):
@@ -227,7 +244,8 @@
for top, dirs, files in os.walk(libpath):
for i, d in reversed(list(enumerate(dirs))):
if (not os.path.exists(os.path.join(top, d, '__init__.py'))
- or top == libpath and d in ('hgext', 'mercurial')):
+ or top == libpath and d in ('hgdemandimport', 'hgext',
+ 'mercurial')):
del dirs[i]
for name in files:
if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')):
@@ -249,7 +267,7 @@
Args:
source: The python source to examine as a string.
modulename: of specified python source (may have `__init__`)
- localmods: dict of locally defined module names (may have `__init__`)
+ localmods: set of locally defined module names (may have `__init__`)
ignore_nested: If true, import statements that do not start in
column zero will be ignored.
@@ -468,10 +486,11 @@
found = fromlocal(node.module, node.level)
if found and found[2]: # node.module is a package
prefix = found[0] + '.'
- symbols = [n.name for n in node.names
- if not fromlocal(prefix + n.name)]
+ symbols = (n.name for n in node.names
+ if not fromlocal(prefix + n.name))
else:
- symbols = [n.name for n in node.names]
+ symbols = (n.name for n in node.names)
+ symbols = [sym for sym in symbols if sym not in directsymbols]
if node.module and node.col_offset == root_col_offset:
if symbols and fullname not in allowsymbolimports:
yield msg('direct symbol import %s from %s',
@@ -687,13 +706,14 @@
if argv[1] == '-':
argv = argv[:1]
argv.extend(l.rstrip() for l in sys.stdin.readlines())
- localmods = {}
+ localmodpaths = {}
used_imports = {}
any_errors = False
for source_path in argv[1:]:
- modname = dotted_name_of_path(source_path, trimpure=True)
- localmods[modname] = source_path
- for localmodname, source_path in sorted(localmods.items()):
+ modname = dotted_name_of_path(source_path)
+ localmodpaths[modname] = source_path
+ localmods = populateextmods(localmodpaths)
+ for localmodname, source_path in sorted(localmodpaths.items()):
for src, modname, name, line in sources(source_path, localmodname):
try:
used_imports[modname] = sorted(
--- a/contrib/mercurial.spec Tue Jun 13 22:24:41 2017 -0400
+++ b/contrib/mercurial.spec Tue Jun 20 16:33:46 2017 -0400
@@ -37,8 +37,8 @@
%if "%{?withpython}"
BuildRequires: readline-devel, openssl-devel, ncurses-devel, zlib-devel, bzip2-devel
%else
-BuildRequires: python >= 2.6, python-devel, python-docutils >= 0.5
-Requires: python >= 2.6
+BuildRequires: python >= 2.7, python-devel, python-docutils >= 0.5
+Requires: python >= 2.7
%endif
# The hgk extension uses the wish tcl interpreter, but we don't enforce it
#Requires: tk
@@ -153,10 +153,9 @@
%{_bindir}/%{pythonhg}
%{hgpyprefix}
%else
-%if "%{?pythonver}" != "2.4"
%{_libdir}/python%{pythonver}/site-packages/%{name}-*-py%{pythonver}.egg-info
-%endif
%{_libdir}/python%{pythonver}/site-packages/%{name}
%{_libdir}/python%{pythonver}/site-packages/hgext
%{_libdir}/python%{pythonver}/site-packages/hgext3rd
+%{_libdir}/python%{pythonver}/site-packages/hgdemandimport
%endif
--- a/contrib/packagelib.sh Tue Jun 13 22:24:41 2017 -0400
+++ b/contrib/packagelib.sh Tue Jun 20 16:33:46 2017 -0400
@@ -14,7 +14,7 @@
$HG version > /dev/null || { echo 'abort: hg version failed!'; exit 1 ; }
- hgversion=`$HG version | sed -ne 's/.*(version \(.*\))$/\1/p'`
+ hgversion=`LANGUAGE=C $HG version | sed -ne 's/.*(version \(.*\))$/\1/p'`
if echo $hgversion | grep + > /dev/null 2>&1 ; then
tmp=`echo $hgversion | cut -d+ -f 2`
--- a/contrib/perf.py Tue Jun 13 22:24:41 2017 -0400
+++ b/contrib/perf.py Tue Jun 20 16:33:46 2017 -0400
@@ -23,10 +23,10 @@
import gc
import os
import random
+import struct
import sys
import time
from mercurial import (
- bdiff,
changegroup,
cmdutil,
commands,
@@ -35,6 +35,7 @@
extensions,
mdiff,
merge,
+ revlog,
util,
)
@@ -50,6 +51,11 @@
except ImportError:
pass
try:
+ from mercurial import registrar # since 3.7 (or 37d50250b696)
+ dir(registrar) # forcibly load it
+except ImportError:
+ registrar = None
+try:
from mercurial import repoview # since 2.5 (or 3a6ddacb7198)
except ImportError:
pass
@@ -81,18 +87,20 @@
# available, because commands.formatteropts has been available since
# 3.2 (or 7a7eed5176a4), even though formatting itself has been
# available since 2.2 (or ae5f92e154d3)
-formatteropts = getattr(commands, "formatteropts", [])
+formatteropts = getattr(cmdutil, "formatteropts",
+ getattr(commands, "formatteropts", []))
# for "historical portability":
# use locally defined option list, if debugrevlogopts isn't available,
# because commands.debugrevlogopts has been available since 3.7 (or
# 5606f7d0d063), even though cmdutil.openrevlog() has been available
# since 1.9 (or a79fea6b3e77).
-revlogopts = getattr(commands, "debugrevlogopts", [
+revlogopts = getattr(cmdutil, "debugrevlogopts",
+ getattr(commands, "debugrevlogopts", [
('c', 'changelog', False, ('open changelog')),
('m', 'manifest', False, ('open manifest')),
('', 'dir', False, ('open directory manifest')),
- ])
+ ]))
cmdtable = {}
@@ -102,7 +110,9 @@
def parsealiases(cmd):
return cmd.lstrip("^").split("|")
-if safehasattr(cmdutil, 'command'):
+if safehasattr(registrar, 'command'):
+ command = registrar.command(cmdtable)
+elif safehasattr(cmdutil, 'command'):
import inspect
command = cmdutil.command(cmdtable)
if 'norepo' not in inspect.getargspec(command)[0]:
@@ -347,6 +357,14 @@
# - perf.py itself has been available since 1.1 (or eb240755386d)
raise error.Abort(("tags API of this hg command is unknown"))
+# utilities to clear cache
+
+def clearfilecache(repo, attrname):
+ unfi = repo.unfiltered()
+ if attrname in vars(unfi):
+ delattr(unfi, attrname)
+ unfi._filecache.pop(attrname, None)
+
# perf commands
@command('perfwalk', formatteropts)
@@ -449,6 +467,16 @@
timer(d)
fm.end()
+@command('perfbookmarks', formatteropts)
+def perfbookmarks(ui, repo, **opts):
+ """benchmark parsing bookmarks from disk to memory"""
+ timer, fm = gettimer(ui, opts)
+ def d():
+ clearfilecache(repo, '_bookmarks')
+ repo._bookmarks
+ timer(d)
+ fm.end()
+
@command('perfchangegroupchangelog', formatteropts +
[('', 'version', '02', 'changegroup version'),
('r', 'rev', '', 'revisions to add to changegroup')])
@@ -573,6 +601,24 @@
timer(d)
fm.end()
+@command('perfphases',
+ [('', 'full', False, 'include file reading time too'),
+ ], "")
+def perfphases(ui, repo, **opts):
+ """benchmark phasesets computation"""
+ timer, fm = gettimer(ui, opts)
+ _phases = repo._phasecache
+ full = opts.get('full')
+ def d():
+ phases = _phases
+ if full:
+ clearfilecache(repo, '_phasecache')
+ phases = repo._phasecache
+ phases.invalidate()
+ phases.loadphaserevs(repo)
+ timer(d)
+ fm.end()
+
@command('perfmanifest', [], 'REV')
def perfmanifest(ui, repo, rev, **opts):
timer, fm = gettimer(ui, opts)
@@ -615,7 +661,7 @@
if os.name != 'nt':
os.system("HGRCPATH= %s version -q > /dev/null" % cmd)
else:
- os.environ['HGRCPATH'] = ''
+ os.environ['HGRCPATH'] = ' '
os.system("%s version -q > NUL" % cmd)
timer(d)
fm.end()
@@ -812,7 +858,7 @@
def d():
for pair in textpairs:
- bdiff.bdiff(*pair)
+ mdiff.textdiff(*pair)
timer, fm = gettimer(ui, opts)
timer(d)
@@ -838,12 +884,129 @@
timer(d, title)
fm.end()
-@command('perfrevlog', revlogopts + formatteropts +
+@command('perfrevlogindex', revlogopts + formatteropts,
+ '-c|-m|FILE')
+def perfrevlogindex(ui, repo, file_=None, **opts):
+ """Benchmark operations against a revlog index.
+
+ This tests constructing a revlog instance, reading index data,
+ parsing index data, and performing various operations related to
+ index data.
+ """
+
+ rl = cmdutil.openrevlog(repo, 'perfrevlogindex', file_, opts)
+
+ opener = getattr(rl, 'opener') # trick linter
+ indexfile = rl.indexfile
+ data = opener.read(indexfile)
+
+ header = struct.unpack('>I', data[0:4])[0]
+ version = header & 0xFFFF
+ if version == 1:
+ revlogio = revlog.revlogio()
+ inline = header & (1 << 16)
+ else:
+ raise error.Abort(('unsupported revlog version: %d') % version)
+
+ rllen = len(rl)
+
+ node0 = rl.node(0)
+ node25 = rl.node(rllen // 4)
+ node50 = rl.node(rllen // 2)
+ node75 = rl.node(rllen // 4 * 3)
+ node100 = rl.node(rllen - 1)
+
+ allrevs = range(rllen)
+ allrevsrev = list(reversed(allrevs))
+ allnodes = [rl.node(rev) for rev in range(rllen)]
+ allnodesrev = list(reversed(allnodes))
+
+ def constructor():
+ revlog.revlog(opener, indexfile)
+
+ def read():
+ with opener(indexfile) as fh:
+ fh.read()
+
+ def parseindex():
+ revlogio.parseindex(data, inline)
+
+ def getentry(revornode):
+ index = revlogio.parseindex(data, inline)[0]
+ index[revornode]
+
+ def getentries(revs, count=1):
+ index = revlogio.parseindex(data, inline)[0]
+
+ for i in range(count):
+ for rev in revs:
+ index[rev]
+
+ def resolvenode(node):
+ nodemap = revlogio.parseindex(data, inline)[1]
+ # This only works for the C code.
+ if nodemap is None:
+ return
+
+ try:
+ nodemap[node]
+ except error.RevlogError:
+ pass
+
+ def resolvenodes(nodes, count=1):
+ nodemap = revlogio.parseindex(data, inline)[1]
+ if nodemap is None:
+ return
+
+ for i in range(count):
+ for node in nodes:
+ try:
+ nodemap[node]
+ except error.RevlogError:
+ pass
+
+ benches = [
+ (constructor, 'revlog constructor'),
+ (read, 'read'),
+ (parseindex, 'create index object'),
+ (lambda: getentry(0), 'retrieve index entry for rev 0'),
+ (lambda: resolvenode('a' * 20), 'look up missing node'),
+ (lambda: resolvenode(node0), 'look up node at rev 0'),
+ (lambda: resolvenode(node25), 'look up node at 1/4 len'),
+ (lambda: resolvenode(node50), 'look up node at 1/2 len'),
+ (lambda: resolvenode(node75), 'look up node at 3/4 len'),
+ (lambda: resolvenode(node100), 'look up node at tip'),
+ # 2x variation is to measure caching impact.
+ (lambda: resolvenodes(allnodes),
+ 'look up all nodes (forward)'),
+ (lambda: resolvenodes(allnodes, 2),
+ 'look up all nodes 2x (forward)'),
+ (lambda: resolvenodes(allnodesrev),
+ 'look up all nodes (reverse)'),
+ (lambda: resolvenodes(allnodesrev, 2),
+ 'look up all nodes 2x (reverse)'),
+ (lambda: getentries(allrevs),
+ 'retrieve all index entries (forward)'),
+ (lambda: getentries(allrevs, 2),
+ 'retrieve all index entries 2x (forward)'),
+ (lambda: getentries(allrevsrev),
+ 'retrieve all index entries (reverse)'),
+ (lambda: getentries(allrevsrev, 2),
+ 'retrieve all index entries 2x (reverse)'),
+ ]
+
+ for fn, title in benches:
+ timer, fm = gettimer(ui, opts)
+ timer(fn, title=title)
+ fm.end()
+
+@command('perfrevlogrevisions', revlogopts + formatteropts +
[('d', 'dist', 100, 'distance between the revisions'),
('s', 'startrev', 0, 'revision to start reading at'),
('', 'reverse', False, 'read in reverse')],
'-c|-m|FILE')
-def perfrevlog(ui, repo, file_=None, startrev=0, reverse=False, **opts):
+def perfrevlogrevisions(ui, repo, file_=None, startrev=0, reverse=False,
+ **opts):
"""Benchmark reading a series of revisions from a revlog.
By default, we read every ``-d/--dist`` revision from 0 to tip of
@@ -851,23 +1014,26 @@
The start revision can be defined via ``-s/--startrev``.
"""
- timer, fm = gettimer(ui, opts)
- _len = getlen(ui)
+ rl = cmdutil.openrevlog(repo, 'perfrevlogrevisions', file_, opts)
+ rllen = getlen(ui)(rl)
def d():
- r = cmdutil.openrevlog(repo, 'perfrevlog', file_, opts)
+ rl.clearcaches()
- startrev = 0
- endrev = _len(r)
+ beginrev = startrev
+ endrev = rllen
dist = opts['dist']
if reverse:
- startrev, endrev = endrev, startrev
+ beginrev, endrev = endrev, beginrev
dist = -1 * dist
- for x in xrange(startrev, endrev, dist):
- r.revision(r.node(x))
+ for x in xrange(beginrev, endrev, dist):
+ # Old revisions don't support passing int.
+ n = rl.node(x)
+ rl.revision(n)
+ timer, fm = gettimer(ui, opts)
timer(d)
fm.end()
@@ -885,10 +1051,16 @@
This command measures the time it takes to read+decompress and recompress
chunks in a revlog. It effectively isolates I/O and compression performance.
For measurements of higher-level operations like resolving revisions,
- see ``perfrevlog`` and ``perfrevlogrevision``.
+ see ``perfrevlogrevisions`` and ``perfrevlogrevision``.
"""
rl = cmdutil.openrevlog(repo, 'perfrevlogchunks', file_, opts)
+ # _chunkraw was renamed to _getsegmentforrevs.
+ try:
+ segmentforrevs = rl._getsegmentforrevs
+ except AttributeError:
+ segmentforrevs = rl._chunkraw
+
# Verify engines argument.
if engines:
engines = set(e.strip() for e in engines.split(','))
@@ -919,22 +1091,22 @@
def doread():
rl.clearcaches()
for rev in revs:
- rl._chunkraw(rev, rev)
+ segmentforrevs(rev, rev)
def doreadcachedfh():
rl.clearcaches()
fh = rlfh(rl)
for rev in revs:
- rl._chunkraw(rev, rev, df=fh)
+ segmentforrevs(rev, rev, df=fh)
def doreadbatch():
rl.clearcaches()
- rl._chunkraw(revs[0], revs[-1])
+ segmentforrevs(revs[0], revs[-1])
def doreadbatchcachedfh():
rl.clearcaches()
fh = rlfh(rl)
- rl._chunkraw(revs[0], revs[-1], df=fh)
+ segmentforrevs(revs[0], revs[-1], df=fh)
def dochunk():
rl.clearcaches()
@@ -1003,6 +1175,13 @@
raise error.CommandError('perfrevlogrevision', 'invalid arguments')
r = cmdutil.openrevlog(repo, 'perfrevlogrevision', file_, opts)
+
+ # _chunkraw was renamed to _getsegmentforrevs.
+ try:
+ segmentforrevs = r._getsegmentforrevs
+ except AttributeError:
+ segmentforrevs = r._chunkraw
+
node = r.lookup(rev)
rev = r.rev(node)
@@ -1034,7 +1213,7 @@
def doread(chain):
if not cache:
r.clearcaches()
- r._chunkraw(chain[0], chain[-1])
+ segmentforrevs(chain[0], chain[-1])
def dorawchunks(data, chain):
if not cache:
@@ -1062,7 +1241,7 @@
r.revision(node)
chain = r._deltachain(rev)[0]
- data = r._chunkraw(chain[0], chain[-1])[1]
+ data = segmentforrevs(chain[0], chain[-1])[1]
rawchunks = getrawchunks(data, chain)
bins = r._chunks(chain)
text = str(bins[0])
@@ -1105,7 +1284,9 @@
timer(d)
fm.end()
-@command('perfvolatilesets', formatteropts)
+@command('perfvolatilesets',
+ [('', 'clear-obsstore', False, 'drop obsstore between each call.'),
+ ] + formatteropts)
def perfvolatilesets(ui, repo, *names, **opts):
"""benchmark the computation of various volatile set
@@ -1116,6 +1297,8 @@
def getobs(name):
def d():
repo.invalidatevolatilesets()
+ if opts['clear_obsstore']:
+ clearfilecache(repo, 'obsstore')
obsolete.getrevs(repo, name)
return d
@@ -1129,6 +1312,8 @@
def getfiltered(name):
def d():
repo.invalidatevolatilesets()
+ if opts['clear_obsstore']:
+ clearfilecache(repo, 'obsstore')
repoview.filterrevs(repo, name)
return d
@@ -1143,8 +1328,10 @@
@command('perfbranchmap',
[('f', 'full', False,
'Includes build time of subset'),
+ ('', 'clear-revbranch', False,
+ 'purge the revbranch cache between computation'),
] + formatteropts)
-def perfbranchmap(ui, repo, full=False, **opts):
+def perfbranchmap(ui, repo, full=False, clear_revbranch=False, **opts):
"""benchmark the update of a branchmap
This benchmarks the full repo.branchmap() call with read and write disabled
@@ -1157,6 +1344,8 @@
else:
view = repo.filtered(filtername)
def d():
+ if clear_revbranch:
+ repo.revbranchcache()._clear()
if full:
view._branchcaches.clear()
else:
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/python3-ratchet.py Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,116 @@
+# Copyright 2012 Facebook
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+"""Find tests that newly pass under Python 3.
+
+The approach is simple: we maintain a whitelist of Python 3 passing
+tests in the repository, and periodically run all the /other/ tests
+and look for new passes. Any newly passing tests get automatically
+added to the whitelist.
+
+You probably want to run it like this:
+
+ $ cd tests
+ $ python3 ../contrib/python3-ratchet.py \
+ > --working-tests=../contrib/python3-whitelist
+"""
+from __future__ import print_function
+from __future__ import absolute_import
+
+import argparse
+import json
+import os
+import subprocess
+import sys
+
+_hgenv = dict(os.environ)
+_hgenv.update({
+ 'HGPLAIN': '1',
+ })
+
+_HG_FIRST_CHANGE = '9117c6561b0bd7792fa13b50d28239d51b78e51f'
+
+def _runhg(*args):
+ return subprocess.check_output(args, env=_hgenv)
+
+def _is_hg_repo(path):
+ return _runhg('hg', 'log', '-R', path,
+ '-r0', '--template={node}').strip() == _HG_FIRST_CHANGE
+
+def _py3default():
+ if sys.version_info[0] >= 3:
+ return sys.executable
+ return 'python3'
+
+def main(argv=()):
+ p = argparse.ArgumentParser()
+ p.add_argument('--working-tests',
+ help='List of tests that already work in Python 3.')
+ p.add_argument('--commit-to-repo',
+ help='If set, commit newly fixed tests to the given repo')
+ p.add_argument('-j', default=os.sysconf(r'SC_NPROCESSORS_ONLN'), type=int,
+ help='Number of parallel tests to run.')
+ p.add_argument('--python3', default=_py3default(),
+ help='python3 interpreter to use for test run')
+ p.add_argument('--commit-user',
+ default='python3-ratchet@mercurial-scm.org',
+ help='Username to specify when committing to a repo.')
+ opts = p.parse_args(argv)
+ if opts.commit_to_repo:
+ if not _is_hg_repo(opts.commit_to_repo):
+ print('abort: specified repository is not the hg repository')
+ sys.exit(1)
+ if not opts.working_tests or not os.path.isfile(opts.working_tests):
+ print('abort: --working-tests must exist and be a file (got %r)' %
+ opts.working_tests)
+ sys.exit(1)
+ elif opts.commit_to_repo:
+ root = _runhg('hg', 'root').strip()
+ if not opts.working_tests.startswith(root):
+ print('abort: if --commit-to-repo is given, '
+ '--working-tests must be from that repo')
+ sys.exit(1)
+ try:
+ subprocess.check_call([opts.python3, '-c',
+ 'import sys ; '
+ 'assert ((3, 5) <= sys.version_info < (3, 6) '
+ 'or sys.version_info >= (3, 6, 2))'])
+ except subprocess.CalledProcessError:
+ print('warning: Python 3.6.0 and 3.6.1 have '
+ 'a bug which breaks Mercurial')
+ print('(see https://bugs.python.org/issue29714 for details)')
+ # TODO(augie): uncomment exit when Python 3.6.2 is available
+ # sys.exit(1)
+
+ rt = subprocess.Popen([opts.python3, 'run-tests.py', '-j', str(opts.j),
+ '--blacklist', opts.working_tests, '--json'])
+ rt.wait()
+ with open('report.json') as f:
+ data = f.read()
+ report = json.loads(data.split('=', 1)[1])
+ newpass = set()
+ for test, result in report.items():
+ if result['result'] != 'success':
+ continue
+ # A new passing test! Huzzah!
+ newpass.add(test)
+ if newpass:
+ # We already validated the repo, so we can just dive right in
+ # and commit.
+ if opts.commit_to_repo:
+ print(len(newpass), 'new passing tests on Python 3!')
+ with open(opts.working_tests) as f:
+ oldpass = {l for l in f.read().splitlines() if l}
+ with open(opts.working_tests, 'w') as f:
+ for p in sorted(oldpass | newpass):
+ f.write('%s\n' % p)
+ _runhg('hg', 'commit', '-R', opts.commit_to_repo,
+ '--user', opts.commit_user,
+ '--message', 'python3: expand list of passing tests')
+ else:
+ print('Newly passing tests:', '\n'.join(sorted(newpass)))
+ sys.exit(2)
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/python3-whitelist Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,11 @@
+test-ancestor.py
+test-check-execute.t
+test-check-shbang.t
+test-contrib-check-code.t
+test-contrib-check-commit.t
+test-ctxmanager.py
+test-doctest.py
+test-lrucachedict.py
+test-run-tests.py
+test-unified-test.t
+test-xdg.t
--- a/contrib/synthrepo.py Tue Jun 13 22:24:41 2017 -0400
+++ b/contrib/synthrepo.py Tue Jun 20 16:33:46 2017 -0400
@@ -53,11 +53,11 @@
short,
)
from mercurial import (
- cmdutil,
context,
error,
hg,
patch,
+ registrar,
scmutil,
util,
)
@@ -69,9 +69,9 @@
testedwith = 'ships-with-hg-core'
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
-newfile = set(('new fi', 'rename', 'copy f', 'copy t'))
+newfile = {'new fi', 'rename', 'copy f', 'copy t'}
def zerodict():
return collections.defaultdict(lambda: 0)
@@ -336,7 +336,7 @@
wlock = repo.wlock()
lock = repo.lock()
- nevertouch = set(('.hgsub', '.hgignore', '.hgtags'))
+ nevertouch = {'.hgsub', '.hgignore', '.hgtags'}
progress = ui.progress
_synthesizing = _('synthesizing')
--- a/contrib/wix/dist.wxs Tue Jun 13 22:24:41 2017 -0400
+++ b/contrib/wix/dist.wxs Tue Jun 20 16:33:46 2017 -0400
@@ -12,12 +12,12 @@
<Directory Id="libdir" Name="lib" FileSource="$(var.SourceDir)/lib">
<Component Id="libOutput" Guid="$(var.lib.guid)" Win64='$(var.IsX64)'>
<File Name="library.zip" KeyPath="yes" />
- <File Name="mercurial.base85.pyd" />
- <File Name="mercurial.bdiff.pyd" />
- <File Name="mercurial.diffhelpers.pyd" />
- <File Name="mercurial.mpatch.pyd" />
- <File Name="mercurial.osutil.pyd" />
- <File Name="mercurial.parsers.pyd" />
+ <File Name="mercurial.cext.base85.pyd" />
+ <File Name="mercurial.cext.bdiff.pyd" />
+ <File Name="mercurial.cext.diffhelpers.pyd" />
+ <File Name="mercurial.cext.mpatch.pyd" />
+ <File Name="mercurial.cext.osutil.pyd" />
+ <File Name="mercurial.cext.parsers.pyd" />
<File Name="mercurial.zstd.pyd" />
<File Name="pyexpat.pyd" />
<File Name="bz2.pyd" />
--- a/doc/check-seclevel.py Tue Jun 13 22:24:41 2017 -0400
+++ b/doc/check-seclevel.py Tue Jun 20 16:33:46 2017 -0400
@@ -23,11 +23,11 @@
table = commands.table
helptable = help.helptable
-level2mark = ['"', '=', '-', '.', '#']
-reservedmarks = ['"']
+level2mark = [b'"', b'=', b'-', b'.', b'#']
+reservedmarks = [b'"']
mark2level = {}
-for m, l in zip(level2mark, xrange(len(level2mark))):
+for m, l in zip(level2mark, range(len(level2mark))):
if m not in reservedmarks:
mark2level[m] = l
@@ -37,22 +37,25 @@
initlevel_ext_cmd = 3
def showavailables(ui, initlevel):
- ui.warn((' available marks and order of them in this help: %s\n') %
- (', '.join(['%r' % (m * 4) for m in level2mark[initlevel + 1:]])))
+ avail = (' available marks and order of them in this help: %s\n') % (
+ ', '.join(['%r' % (m * 4) for m in level2mark[initlevel + 1:]]))
+ ui.warn(avail.encode('utf-8'))
def checkseclevel(ui, doc, name, initlevel):
ui.note(('checking "%s"\n') % name)
+ if not isinstance(doc, bytes):
+ doc = doc.encode('utf-8')
blocks, pruned = minirst.parse(doc, 0, ['verbose'])
errorcnt = 0
curlevel = initlevel
for block in blocks:
- if block['type'] != 'section':
+ if block[b'type'] != b'section':
continue
- mark = block['underline']
- title = block['lines'][0]
+ mark = block[b'underline']
+ title = block[b'lines'][0]
if (mark not in mark2level) or (mark2level[mark] <= initlevel):
- ui.warn(('invalid section mark %r for "%s" of %s\n') %
- (mark * 4, title, name))
+ ui.warn((('invalid section mark %r for "%s" of %s\n') %
+ (mark * 4, title, name)).encode('utf-8'))
showavailables(ui, initlevel)
errorcnt += 1
continue
@@ -72,7 +75,7 @@
def checkcmdtable(ui, cmdtable, namefmt, initlevel):
errorcnt = 0
for k, entry in cmdtable.items():
- name = k.split("|")[0].lstrip("^")
+ name = k.split(b"|")[0].lstrip(b"^")
if not entry[0].__doc__:
ui.note(('skip checking %s: no help document\n') %
(namefmt % name))
@@ -93,8 +96,8 @@
errorcnt += checkcmdtable(ui, table, '%s command', initlevel_cmd)
- for name in sorted(extensions.enabled().keys() +
- extensions.disabled().keys()):
+ for name in sorted(list(extensions.enabled()) +
+ list(extensions.disabled())):
mod = extensions.load(ui, name, None)
if not mod.__doc__:
ui.note(('skip checking %s extension: no help document\n') % name)
@@ -106,7 +109,7 @@
cmdtable = getattr(mod, 'cmdtable', None)
if cmdtable:
errorcnt += checkcmdtable(ui, cmdtable,
- '%s command of ' + name + ' extension',
+ '%%s command of %s extension' % name,
initlevel_ext_cmd)
return errorcnt
--- a/doc/gendoc.py Tue Jun 13 22:24:41 2017 -0400
+++ b/doc/gendoc.py Tue Jun 20 16:33:46 2017 -0400
@@ -16,6 +16,10 @@
# import from the live mercurial repo
sys.path.insert(0, "..")
from mercurial import demandimport; demandimport.enable()
+# Load util so that the locale path is set by i18n.setdatapath() before
+# calling _().
+from mercurial import util
+util.datapath
from mercurial import (
commands,
extensions,
--- a/hg Tue Jun 13 22:24:41 2017 -0400
+++ b/hg Tue Jun 20 16:33:46 2017 -0400
@@ -28,8 +28,8 @@
# enable importing on demand to reduce startup time
try:
- if sys.version_info[0] < 3:
- from mercurial import demandimport; demandimport.enable()
+ if sys.version_info[0] < 3 or sys.version_info >= (3, 6):
+ import hgdemandimport; hgdemandimport.enable()
except ImportError:
sys.stderr.write("abort: couldn't find mercurial libraries in [%s]\n" %
' '.join(sys.path))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgdemandimport/__init__.py Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,73 @@
+# hgdemandimport - global demand-loading of modules for Mercurial
+#
+# Copyright 2017 Facebook Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''demandimport - automatic demand-loading of modules'''
+
+# This is in a separate package from mercurial because in Python 3,
+# demand loading is per-package. Keeping demandimport in the mercurial package
+# would disable demand loading for any modules in mercurial.
+
+from __future__ import absolute_import
+
+import sys
+
+if sys.version_info[0] >= 3:
+ from . import demandimportpy3 as demandimport
+else:
+ from . import demandimportpy2 as demandimport
+
+# Extensions can add to this list if necessary.
+ignore = [
+ '__future__',
+ '_hashlib',
+ # ImportError during pkg_resources/__init__.py:fixup_namespace_package
+ '_imp',
+ '_xmlplus',
+ 'fcntl',
+ 'nt', # pathlib2 tests the existence of built-in 'nt' module
+ 'win32com.gen_py',
+ 'win32com.shell', # 'appdirs' tries to import win32com.shell
+ '_winreg', # 2.7 mimetypes needs immediate ImportError
+ 'pythoncom',
+ # imported by tarfile, not available under Windows
+ 'pwd',
+ 'grp',
+ # imported by profile, itself imported by hotshot.stats,
+ # not available under Windows
+ 'resource',
+ # this trips up many extension authors
+ 'gtk',
+ # setuptools' pkg_resources.py expects "from __main__ import x" to
+ # raise ImportError if x not defined
+ '__main__',
+ '_ssl', # conditional imports in the stdlib, issue1964
+ '_sre', # issue4920
+ 'rfc822',
+ 'mimetools',
+ 'sqlalchemy.events', # has import-time side effects (issue5085)
+ # setuptools 8 expects this module to explode early when not on windows
+ 'distutils.msvc9compiler',
+ '__builtin__',
+ 'builtins',
+ 'urwid.command_map', # for pudb
+ ]
+
+_pypy = '__pypy__' in sys.builtin_module_names
+
+if _pypy:
+ ignore.extend([
+ # _ctypes.pointer is shadowed by "from ... import pointer" (PyPy 5)
+ '_ctypes.pointer',
+ ])
+
+demandimport.init(ignore)
+
+# Re-export.
+isenabled = demandimport.isenabled
+enable = demandimport.enable
+disable = demandimport.disable
+deactivated = demandimport.deactivated
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgdemandimport/demandimportpy2.py Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,313 @@
+# demandimport.py - global demand-loading of modules for Mercurial
+#
+# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''
+demandimport - automatic demand-loading of modules
+
+To enable this module, do:
+
+  import hgdemandimport; hgdemandimport.enable()
+
+Imports of the following forms will be demand-loaded:
+
+ import a, b.c
+ import a.b as c
+ from a import b,c # a will be loaded immediately
+
+These imports will not be delayed:
+
+ from a import *
+ b = __import__(a)
+'''
+
+from __future__ import absolute_import
+
+import contextlib
+import os
+import sys
+
+# __builtin__ in Python 2, builtins in Python 3.
+try:
+ import __builtin__ as builtins
+except ImportError:
+ import builtins
+
+contextmanager = contextlib.contextmanager
+
+_origimport = __import__
+
+nothing = object()
+
+# Python 3 doesn't have relative imports nor level -1.
+level = -1
+if sys.version_info[0] >= 3:
+ level = 0
+_import = _origimport
+
+def _hgextimport(importfunc, name, globals, *args, **kwargs):
+ try:
+ return importfunc(name, globals, *args, **kwargs)
+ except ImportError:
+ if not globals:
+ raise
+ # extensions are loaded with "hgext_" prefix
+ hgextname = 'hgext_%s' % name
+ nameroot = hgextname.split('.', 1)[0]
+ contextroot = globals.get('__name__', '').split('.', 1)[0]
+ if nameroot != contextroot:
+ raise
+ # retry to import with "hgext_" prefix
+ return importfunc(hgextname, globals, *args, **kwargs)
+
+class _demandmod(object):
+ """module demand-loader and proxy
+
+ Specify 1 as 'level' argument at construction, to import module
+ relatively.
+ """
+
+ def __init__(self, name, globals, locals, level):
+ if '.' in name:
+ head, rest = name.split('.', 1)
+ after = [rest]
+ else:
+ head = name
+ after = []
+ object.__setattr__(self, r"_data",
+ (head, globals, locals, after, level, set()))
+ object.__setattr__(self, r"_module", None)
+
+ def _extend(self, name):
+ """add to the list of submodules to load"""
+ self._data[3].append(name)
+
+ def _addref(self, name):
+ """Record that the named module ``name`` imports this module.
+
+ References to this proxy class having the name of this module will be
+ replaced at module load time. We assume the symbol inside the importing
+ module is identical to the "head" name of this module. We don't
+ actually know if "as X" syntax is being used to change the symbol name
+ because this information isn't exposed to __import__.
+ """
+ self._data[5].add(name)
+
+ def _load(self):
+ if not self._module:
+ head, globals, locals, after, level, modrefs = self._data
+ mod = _hgextimport(_import, head, globals, locals, None, level)
+ if mod is self:
+ # In this case, _hgextimport() above should imply
+ # _demandimport(). Otherwise, _hgextimport() never
+ # returns _demandmod. This isn't intentional behavior,
+ # in fact. (see also issue5304 for detail)
+ #
+ # If self._module is already bound at this point, self
+ # should be already _load()-ed while _hgextimport().
+ # Otherwise, there is no way to import actual module
+ # as expected, because (re-)invoking _hgextimport()
+ # should cause same result.
+ # This is reason why _load() returns without any more
+ # setup but assumes self to be already bound.
+ mod = self._module
+ assert mod and mod is not self, "%s, %s" % (self, mod)
+ return
+
+ # load submodules
+ def subload(mod, p):
+ h, t = p, None
+ if '.' in p:
+ h, t = p.split('.', 1)
+ if getattr(mod, h, nothing) is nothing:
+ setattr(mod, h, _demandmod(p, mod.__dict__, mod.__dict__,
+ level=1))
+ elif t:
+ subload(getattr(mod, h), t)
+
+ for x in after:
+ subload(mod, x)
+
+ # Replace references to this proxy instance with the actual module.
+ if locals:
+ if locals.get(head) is self:
+ locals[head] = mod
+ elif locals.get(head + r'mod') is self:
+ locals[head + r'mod'] = mod
+
+ for modname in modrefs:
+ modref = sys.modules.get(modname, None)
+ if modref and getattr(modref, head, None) is self:
+ setattr(modref, head, mod)
+
+ object.__setattr__(self, r"_module", mod)
+
+ def __repr__(self):
+ if self._module:
+ return "<proxied module '%s'>" % self._data[0]
+ return "<unloaded module '%s'>" % self._data[0]
+
+ def __call__(self, *args, **kwargs):
+ raise TypeError("%s object is not callable" % repr(self))
+
+ def __getattr__(self, attr):
+ self._load()
+ return getattr(self._module, attr)
+
+ def __setattr__(self, attr, val):
+ self._load()
+ setattr(self._module, attr, val)
+
+ @property
+ def __dict__(self):
+ self._load()
+ return self._module.__dict__
+
+ @property
+ def __doc__(self):
+ self._load()
+ return self._module.__doc__
+
+_pypy = '__pypy__' in sys.builtin_module_names
+
+def _demandimport(name, globals=None, locals=None, fromlist=None, level=level):
+ if locals is None or name in ignore or fromlist == ('*',):
+ # these cases we can't really delay
+ return _hgextimport(_import, name, globals, locals, fromlist, level)
+ elif not fromlist:
+ # import a [as b]
+ if '.' in name: # a.b
+ base, rest = name.split('.', 1)
+ # email.__init__ loading email.mime
+ if globals and globals.get('__name__', None) == base:
+ return _import(name, globals, locals, fromlist, level)
+ # if a is already demand-loaded, add b to its submodule list
+ if base in locals:
+ if isinstance(locals[base], _demandmod):
+ locals[base]._extend(rest)
+ return locals[base]
+ return _demandmod(name, globals, locals, level)
+ else:
+ # There is a fromlist.
+ # from a import b,c,d
+ # from . import b,c,d
+ # from .a import b,c,d
+
+ # level == -1: relative and absolute attempted (Python 2 only).
+ # level >= 0: absolute only (Python 2 w/ absolute_import and Python 3).
+ # The modern Mercurial convention is to use absolute_import everywhere,
+ # so modern Mercurial code will have level >= 0.
+
+ # The name of the module the import statement is located in.
+ globalname = globals.get('__name__')
+
+ def processfromitem(mod, attr):
+ """Process an imported symbol in the import statement.
+
+ If the symbol doesn't exist in the parent module, and if the
+ parent module is a package, it must be a module. We set missing
+ modules up as _demandmod instances.
+ """
+ symbol = getattr(mod, attr, nothing)
+ nonpkg = getattr(mod, '__path__', nothing) is nothing
+ if symbol is nothing:
+ if nonpkg:
+ # do not try relative import, which would raise ValueError,
+ # and leave unknown attribute as the default __import__()
+ # would do. the missing attribute will be detected later
+ # while processing the import statement.
+ return
+ mn = '%s.%s' % (mod.__name__, attr)
+ if mn in ignore:
+ importfunc = _origimport
+ else:
+ importfunc = _demandmod
+ symbol = importfunc(attr, mod.__dict__, locals, level=1)
+ setattr(mod, attr, symbol)
+
+ # Record the importing module references this symbol so we can
+ # replace the symbol with the actual module instance at load
+ # time.
+ if globalname and isinstance(symbol, _demandmod):
+ symbol._addref(globalname)
+
+ def chainmodules(rootmod, modname):
+ # recurse down the module chain, and return the leaf module
+ mod = rootmod
+ for comp in modname.split('.')[1:]:
+ if getattr(mod, comp, nothing) is nothing:
+ setattr(mod, comp, _demandmod(comp, mod.__dict__,
+ mod.__dict__, level=1))
+ mod = getattr(mod, comp)
+ return mod
+
+ if level >= 0:
+ if name:
+ # "from a import b" or "from .a import b" style
+ rootmod = _hgextimport(_origimport, name, globals, locals,
+ level=level)
+ mod = chainmodules(rootmod, name)
+ elif _pypy:
+ # PyPy's __import__ throws an exception if invoked
+ # with an empty name and no fromlist. Recreate the
+ # desired behaviour by hand.
+ mn = globalname
+ mod = sys.modules[mn]
+ if getattr(mod, '__path__', nothing) is nothing:
+ mn = mn.rsplit('.', 1)[0]
+ mod = sys.modules[mn]
+ if level > 1:
+ mn = mn.rsplit('.', level - 1)[0]
+ mod = sys.modules[mn]
+ else:
+ mod = _hgextimport(_origimport, name, globals, locals,
+ level=level)
+
+ for x in fromlist:
+ processfromitem(mod, x)
+
+ return mod
+
+ # But, we still need to support lazy loading of standard library and 3rd
+ # party modules. So handle level == -1.
+ mod = _hgextimport(_origimport, name, globals, locals)
+ mod = chainmodules(mod, name)
+
+ for x in fromlist:
+ processfromitem(mod, x)
+
+ return mod
+
+ignore = []
+
+def init(ignorelist):
+ global ignore
+ ignore = ignorelist
+
+def isenabled():
+ return builtins.__import__ == _demandimport
+
+def enable():
+ "enable global demand-loading of modules"
+ if os.environ.get('HGDEMANDIMPORT') != 'disable':
+ builtins.__import__ = _demandimport
+
+def disable():
+ "disable global demand-loading of modules"
+ builtins.__import__ = _origimport
+
+@contextmanager
+def deactivated():
+ "context manager for disabling demandimport in 'with' blocks"
+ demandenabled = isenabled()
+ if demandenabled:
+ disable()
+
+ try:
+ yield
+ finally:
+ if demandenabled:
+ enable()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgdemandimport/demandimportpy3.py Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,112 @@
+# demandimportpy3 - global demand-loading of modules for Mercurial
+#
+# Copyright 2017 Facebook Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+"""Lazy loading for Python 3.6 and above.
+
+This uses the new importlib finder/loader functionality available in Python 3.5
+and up. The code reuses most of the mechanics implemented inside importlib.util,
+but with a few additions:
+
+* Allow excluding certain modules from lazy imports.
+* Expose an interface that's substantially the same as demandimport for
+ Python 2.
+
+This also has some limitations compared to the Python 2 implementation:
+
+* Much of the logic is per-package, not per-module, so any packages loaded
+ before demandimport is enabled will not be lazily imported in the future. In
+ practice, we only expect builtins to be loaded before demandimport is
+ enabled.
+"""
+
+# This line is unnecessary, but it satisfies test-check-py3-compat.t.
+from __future__ import absolute_import
+
+import contextlib
+import os
+import sys
+
+import importlib.abc
+import importlib.machinery
+import importlib.util
+
+_deactivated = False
+
+class _lazyloaderex(importlib.util.LazyLoader):
+ """This is a LazyLoader except it also follows the _deactivated global and
+ the ignore list.
+ """
+ def exec_module(self, module):
+ """Make the module load lazily."""
+ if _deactivated or module.__name__ in ignore:
+ self.loader.exec_module(module)
+ else:
+ super().exec_module(module)
+
+# This is 3.6+ because with Python 3.5 it isn't possible to lazily load
+# extensions. See the discussion in https://python.org/sf/26186 for more.
+_extensions_loader = _lazyloaderex.factory(
+ importlib.machinery.ExtensionFileLoader)
+_bytecode_loader = _lazyloaderex.factory(
+ importlib.machinery.SourcelessFileLoader)
+_source_loader = _lazyloaderex.factory(importlib.machinery.SourceFileLoader)
+
+def _makefinder(path):
+ return importlib.machinery.FileFinder(
+ path,
+        # This is the order in which loaders are passed in core Python.
+ (_extensions_loader, importlib.machinery.EXTENSION_SUFFIXES),
+ (_source_loader, importlib.machinery.SOURCE_SUFFIXES),
+ (_bytecode_loader, importlib.machinery.BYTECODE_SUFFIXES),
+ )
+
+ignore = []
+
+def init(ignorelist):
+ global ignore
+ ignore = ignorelist
+
+def isenabled():
+ return _makefinder in sys.path_hooks and not _deactivated
+
+def disable():
+ try:
+ while True:
+ sys.path_hooks.remove(_makefinder)
+ except ValueError:
+ pass
+
+def enable():
+ if os.environ.get('HGDEMANDIMPORT') != 'disable':
+ sys.path_hooks.insert(0, _makefinder)
+
+@contextlib.contextmanager
+def deactivated():
+ # This implementation is a bit different from Python 2's. Python 3
+ # maintains a per-package finder cache in sys.path_importer_cache (see
+ # PEP 302). This means that we can't just call disable + enable.
+ # If we do that, in situations like:
+ #
+ # demandimport.enable()
+ # ...
+ # from foo.bar import mod1
+ # with demandimport.deactivated():
+ # from foo.bar import mod2
+ #
+ # mod2 will be imported lazily. (The converse also holds -- whatever finder
+ # first gets cached will be used.)
+ #
+ # Instead, have a global flag the LazyLoader can use.
+ global _deactivated
+ demandenabled = isenabled()
+ if demandenabled:
+ _deactivated = True
+ try:
+ yield
+ finally:
+ if demandenabled:
+ _deactivated = False
--- a/hgext/blackbox.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/blackbox.py Tue Jun 20 16:33:46 2017 -0400
@@ -44,13 +44,13 @@
from mercurial.node import hex
from mercurial import (
- cmdutil,
+ registrar,
ui as uimod,
util,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -171,9 +171,10 @@
return
try:
ui._bbinlog = True
- date = util.datestr(None, '%Y/%m/%d %H:%M:%S')
+ default = self.configdate('devel', 'default-date')
+ date = util.datestr(default, '%Y/%m/%d %H:%M:%S')
user = util.getuser()
- pid = str(util.getpid())
+ pid = '%d' % util.getpid()
formattedmsg = msg[0] % msg[1:]
rev = '(unknown)'
changed = ''
--- a/hgext/bugzilla.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/bugzilla.py Tue Jun 20 16:33:46 2017 -0400
@@ -1040,8 +1040,9 @@
if not mapfile and not tmpl:
tmpl = _('changeset {node|short} in repo {root} refers '
'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
- t = cmdutil.changeset_templater(self.ui, self.repo,
- False, None, tmpl, mapfile, False)
+ spec = cmdutil.logtemplatespec(tmpl, mapfile)
+ t = cmdutil.changeset_templater(self.ui, self.repo, spec,
+ False, None, False)
self.ui.pushbuffer()
t.show(ctx, changes=ctx.changeset(),
bug=str(bugid),
--- a/hgext/censor.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/censor.py Tue Jun 20 16:33:46 2017 -0400
@@ -31,17 +31,17 @@
from mercurial.node import short
from mercurial import (
- cmdutil,
error,
filelog,
lock as lockmod,
+ registrar,
revlog,
scmutil,
util,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -102,7 +102,7 @@
hint=_('clean/delete/update first'))
flogv = flog.version & 0xFFFF
- if flogv != revlog.REVLOGNG:
+ if flogv != revlog.REVLOGV1:
raise error.Abort(
_('censor does not support revlog version %d') % (flogv,))
@@ -117,7 +117,7 @@
# Using two files instead of one makes it easy to rewrite entry-by-entry
idxread = repo.svfs(flog.indexfile, 'r')
idxwrite = repo.svfs(flog.indexfile, 'wb', atomictemp=True)
- if flog.version & revlog.REVLOGNGINLINEDATA:
+ if flog.version & revlog.FLAG_INLINE_DATA:
dataread, datawrite = idxread, idxwrite
else:
dataread = repo.svfs(flog.datafile, 'r')
--- a/hgext/children.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/children.py Tue Jun 20 16:33:46 2017 -0400
@@ -19,13 +19,13 @@
from mercurial.i18n import _
from mercurial import (
cmdutil,
- commands,
+ registrar,
)
-templateopts = commands.templateopts
+templateopts = cmdutil.templateopts
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
--- a/hgext/churn.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/churn.py Tue Jun 20 16:33:46 2017 -0400
@@ -17,24 +17,21 @@
from mercurial.i18n import _
from mercurial import (
cmdutil,
- commands,
encoding,
patch,
+ registrar,
scmutil,
util,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'
-def maketemplater(ui, repo, tmpl):
- return cmdutil.changeset_templater(ui, repo, False, None, tmpl, None, False)
-
def changedlines(ui, repo, ctx1, ctx2, fns):
added, removed = 0, 0
fmatch = scmutil.matchfiles(repo, fns)
@@ -55,7 +52,7 @@
return date.strftime(opts['dateformat'])
else:
tmpl = opts.get('oldtemplate') or opts.get('template')
- tmpl = maketemplater(ui, repo, tmpl)
+ tmpl = cmdutil.makelogtemplater(ui, repo, tmpl)
def getkey(ctx):
ui.pushbuffer()
tmpl.show(ctx)
@@ -114,7 +111,7 @@
('s', 'sort', False, _('sort by key (default: sort by count)')),
('', 'diffstat', False, _('display added/removed lines separately')),
('', 'aliases', '', _('file with email aliases'), _('FILE')),
- ] + commands.walkopts,
+ ] + cmdutil.walkopts,
_("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]"),
inferrepo=True)
def churn(ui, repo, *pats, **opts):
--- a/hgext/clonebundles.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/clonebundles.py Tue Jun 20 16:33:46 2017 -0400
@@ -136,7 +136,7 @@
Manifests can contain multiple entries. Assuming metadata is defined, clients
will filter entries from the manifest that they don't support. The remaining
entries are optionally sorted by client preferences
-(``experimental.clonebundleprefers`` config option). The client then attempts
+(``ui.clonebundleprefers`` config option). The client then attempts
to fetch the bundle at the first URL in the remaining list.
**Errors when downloading a bundle will fail the entire clone operation:
--- a/hgext/convert/__init__.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/convert/__init__.py Tue Jun 20 16:33:46 2017 -0400
@@ -11,7 +11,6 @@
from mercurial.i18n import _
from mercurial import (
- cmdutil,
registrar,
)
@@ -22,7 +21,7 @@
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -182,8 +181,8 @@
where "original_branch_name" is the name of the branch in the
source repository, and "new_branch_name" is the name of the branch
- is the destination repository. No whitespace is allowed in the
- branch names. This can be used to (for instance) move code in one
+   in the destination repository. No whitespace is allowed in the new
+ branch name. This can be used to (for instance) move code in one
repository from "default" to a named branch.
Mercurial Source
--- a/hgext/convert/filemap.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/convert/filemap.py Tue Jun 20 16:33:46 2017 -0400
@@ -310,7 +310,7 @@
# map to any revision in the restricted graph. Put SKIPREV
# in the set of wanted ancestors to simplify code elsewhere
self.parentmap[rev] = SKIPREV
- self.wantedancestors[rev] = set((SKIPREV,))
+ self.wantedancestors[rev] = {SKIPREV}
return
# Reuse the data from our parent.
--- a/hgext/convert/git.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/convert/git.py Tue Jun 20 16:33:46 2017 -0400
@@ -32,7 +32,7 @@
return "%s %s" % (self.node, self.path)
# Keys in extra fields that should not be copied if the user requests.
-bannedextrakeys = set([
+bannedextrakeys = {
# Git commit object built-ins.
'tree',
'parent',
@@ -41,7 +41,7 @@
# Mercurial built-ins.
'branch',
'close',
-])
+}
class convert_git(common.converter_source, common.commandline):
# Windows does not support GIT_DIR= construct while other systems
@@ -455,9 +455,9 @@
('refs/heads/', '')
]
- exclude = set([
+ exclude = {
'refs/remotes/origin/HEAD',
- ])
+ }
try:
output, status = self.gitrunlines('show-ref')
--- a/hgext/extdiff.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/extdiff.py Tue Jun 20 16:33:46 2017 -0400
@@ -74,16 +74,16 @@
from mercurial import (
archival,
cmdutil,
- commands,
error,
filemerge,
pycompat,
+ registrar,
scmutil,
util,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -101,7 +101,7 @@
dirname = '%s.%s' % (dirname, short(node))
base = os.path.join(tmproot, dirname)
os.mkdir(base)
- fns_and_mtime = []
+ fnsandstat = []
if node is not None:
ui.note(_('making snapshot of %d files from rev %s\n') %
@@ -124,9 +124,8 @@
if node is None:
dest = os.path.join(base, wfn)
- fns_and_mtime.append((dest, repo.wjoin(fn),
- os.lstat(dest).st_mtime))
- return dirname, fns_and_mtime
+ fnsandstat.append((dest, repo.wjoin(fn), os.lstat(dest)))
+ return dirname, fnsandstat
def dodiff(ui, repo, cmdline, pats, opts):
'''Do the actual diff:
@@ -199,7 +198,7 @@
dir1b = None
rev1b = ''
- fns_and_mtime = []
+ fnsandstat = []
# If node2 in not the wc or there is >1 change, copy it
dir2root = ''
@@ -212,8 +211,8 @@
#the working dir in this case (because the other cases
#are: diffing 2 revisions or single file -- in which case
#the file is already directly passed to the diff tool).
- dir2, fns_and_mtime = snapshot(ui, repo, modadd, None, tmproot,
- subrepos)
+ dir2, fnsandstat = snapshot(ui, repo, modadd, None, tmproot,
+ subrepos)
else:
# This lets the diff tool open the changed file directly
dir2 = ''
@@ -241,7 +240,7 @@
else:
template = 'hg-%h.patch'
cmdutil.export(repo, [repo[node1a].rev(), repo[node2].rev()],
- template=repo.vfs.reljoin(tmproot, template),
+ fntemplate=repo.vfs.reljoin(tmproot, template),
match=matcher)
label1a = cmdutil.makefilename(repo, template, node1a)
label2 = cmdutil.makefilename(repo, template, node2)
@@ -249,7 +248,7 @@
dir2 = repo.vfs.reljoin(tmproot, label2)
dir1b = None
label1b = None
- fns_and_mtime = []
+ fnsandstat = []
# Function to quote file/dir names in the argument string.
# When not operating in 3-way mode, an empty string is
@@ -275,8 +274,17 @@
ui.debug('running %r in %s\n' % (cmdline, tmproot))
ui.system(cmdline, cwd=tmproot, blockedtag='extdiff')
- for copy_fn, working_fn, mtime in fns_and_mtime:
- if os.lstat(copy_fn).st_mtime != mtime:
+ for copy_fn, working_fn, st in fnsandstat:
+ cpstat = os.lstat(copy_fn)
+ # Some tools copy the file and attributes, so mtime may not detect
+ # all changes. A size check will detect more cases, but not all.
+ # The only certain way to detect every case is to diff all files,
+ # which could be expensive.
+ # copyfile() carries over the permission, so the mode check could
+ # be in an 'elif' branch, but for the case where the file has
+ # changed without affecting mtime or size.
+ if (cpstat.st_mtime != st.st_mtime or cpstat.st_size != st.st_size
+ or (cpstat.st_mode & 0o100) != (st.st_mode & 0o100)):
ui.debug('file changed while diffing. '
'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
util.copyfile(copy_fn, working_fn)
@@ -292,7 +300,7 @@
('r', 'rev', [], _('revision'), _('REV')),
('c', 'change', '', _('change made by revision'), _('REV')),
('', 'patch', None, _('compare patches for two revisions'))
- ] + commands.walkopts + commands.subrepoopts
+ ] + cmdutil.walkopts + cmdutil.subrepoopts
@command('extdiff',
[('p', 'program', '', _('comparison program to run'), _('CMD')),
--- a/hgext/fetch.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/fetch.py Tue Jun 20 16:33:46 2017 -0400
@@ -15,17 +15,17 @@
)
from mercurial import (
cmdutil,
- commands,
error,
exchange,
hg,
lock,
+ registrar,
util,
)
release = lock.release
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -38,7 +38,7 @@
('e', 'edit', None, _('invoke editor on commit messages')),
('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
('', 'switch-parent', None, _('switch parents when merging')),
- ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
+ ] + cmdutil.commitopts + cmdutil.commitopts2 + cmdutil.remoteopts,
_('hg fetch [SOURCE]'))
def fetch(ui, repo, source='default', **opts):
'''pull changes from a remote repository, merge new changes if needed.
--- a/hgext/fsmonitor/__init__.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/fsmonitor/__init__.py Tue Jun 20 16:33:46 2017 -0400
@@ -148,19 +148,7 @@
"""
sha1 = hashlib.sha1()
- if util.safehasattr(ignore, 'includepat'):
- sha1.update(ignore.includepat)
- sha1.update('\0\0')
- if util.safehasattr(ignore, 'excludepat'):
- sha1.update(ignore.excludepat)
- sha1.update('\0\0')
- if util.safehasattr(ignore, 'patternspat'):
- sha1.update(ignore.patternspat)
- sha1.update('\0\0')
- if util.safehasattr(ignore, '_files'):
- for f in ignore._files:
- sha1.update(f)
- sha1.update('\0')
+ sha1.update(repr(ignore))
return sha1.hexdigest()
_watchmanencoding = pywatchman.encoding.get_local_encoding()
@@ -253,10 +241,10 @@
fresh_instance = False
exact = skipstep3 = False
- if matchfn == match.exact: # match.exact
+ if match.isexact(): # match.exact
exact = True
dirignore = util.always # skip step 2
- elif match.files() and not match.anypats(): # match.match, no patterns
+ elif match.prefix(): # match.match, no patterns
skipstep3 = True
if not exact and self._checkcase:
@@ -497,17 +485,14 @@
else:
stateunknown = listunknown
+ if updatestate:
+ ps = poststatus(startclock)
+ self.addpostdsstatus(ps)
+
r = orig(node1, node2, match, listignored, listclean, stateunknown,
listsubrepos)
modified, added, removed, deleted, unknown, ignored, clean = r
- if updatestate:
- notefiles = modified + added + removed + deleted + unknown
- self._fsmonitorstate.set(
- self._fsmonitorstate.getlastclock() or startclock,
- _hashignore(self.dirstate._ignore),
- notefiles)
-
if not listunknown:
unknown = []
@@ -540,6 +525,17 @@
return scmutil.status(
modified, added, removed, deleted, unknown, ignored, clean)
+class poststatus(object):
+ def __init__(self, startclock):
+ self._startclock = startclock
+
+ def __call__(self, wctx, status):
+ clock = wctx.repo()._fsmonitorstate.getlastclock() or self._startclock
+ hashignore = _hashignore(wctx.repo().dirstate._ignore)
+ notefiles = (status.modified + status.added + status.removed +
+ status.deleted + status.unknown)
+ wctx.repo()._fsmonitorstate.set(clock, hashignore, notefiles)
+
def makedirstate(cls):
class fsmonitordirstate(cls):
def _fsmonitorinit(self, fsmonitorstate, watchmanclient):
@@ -573,7 +569,8 @@
return ds
def extsetup(ui):
- wrapfilecache(localrepo.localrepository, 'dirstate', wrapdirstate)
+ extensions.wrapfilecache(
+ localrepo.localrepository, 'dirstate', wrapdirstate)
if pycompat.sysplatform == 'darwin':
# An assist for avoiding the dangling-symlink fsevents bug
extensions.wrapfunction(os, 'symlink', wrapsymlink)
@@ -600,18 +597,31 @@
self.node = node
self.distance = distance
self.partial = partial
+ self._lock = None
+ self.need_leave = False
def __enter__(self):
- self._state('state-enter')
+ # We explicitly need to take a lock here, before we proceed to update
+ # watchman about the update operation, so that we don't race with
+ # some other actor. merge.update is going to take the wlock almost
+ # immediately anyway, so this is effectively extending the lock
+ # around a couple of short sanity checks.
+ self._lock = self.repo.wlock()
+ self.need_leave = self._state('state-enter')
return self
def __exit__(self, type_, value, tb):
- status = 'ok' if type_ is None else 'failed'
- self._state('state-leave', status=status)
+ try:
+ if self.need_leave:
+ status = 'ok' if type_ is None else 'failed'
+ self._state('state-leave', status=status)
+ finally:
+ if self._lock:
+ self._lock.release()
def _state(self, cmd, status='ok'):
if not util.safehasattr(self.repo, '_watchmanclient'):
- return
+ return False
try:
commithash = self.repo[self.node].hex()
self.repo._watchmanclient.command(cmd, {
@@ -626,10 +636,12 @@
# whether the working copy parent is changing
'partial': self.partial,
}})
+ return True
except Exception as e:
# Swallow any errors; fire and forget
self.repo.ui.log(
'watchman', 'Exception %s while running %s\n', e, cmd)
+ return False
# Bracket working copy updates with calls to the watchman state-enter
# and state-leave commands. This allows clients to perform more intelligent
@@ -654,7 +666,7 @@
with state_update(repo, node, distance, partial):
return orig(
repo, node, branchmerge, force, ancestor, mergeancestor,
- labels, matcher, *kwargs)
+ labels, matcher, **kwargs)
def reposetup(ui, repo):
# We don't work with largefiles or inotify
@@ -692,11 +704,13 @@
# at this point since fsmonitorstate wasn't present, repo.dirstate is
# not a fsmonitordirstate
- repo.dirstate.__class__ = makedirstate(repo.dirstate.__class__)
- # nuke the dirstate so that _fsmonitorinit and subsequent configuration
- # changes take effect on it
- del repo._filecache['dirstate']
- delattr(repo.unfiltered(), 'dirstate')
+ dirstate = repo.dirstate
+ dirstate.__class__ = makedirstate(dirstate.__class__)
+ dirstate._fsmonitorinit(fsmonitorstate, client)
+ # invalidate property cache, but keep filecache which contains the
+ # wrapped dirstate object
+ del repo.unfiltered().__dict__['dirstate']
+ assert dirstate is repo._filecache['dirstate'].obj
class fsmonitorrepo(repo.__class__):
def status(self, *args, **kwargs):
@@ -704,21 +718,3 @@
return overridestatus(orig, self, *args, **kwargs)
repo.__class__ = fsmonitorrepo
-
-def wrapfilecache(cls, propname, wrapper):
- """Wraps a filecache property. These can't be wrapped using the normal
- wrapfunction. This should eventually go into upstream Mercurial.
- """
- assert callable(wrapper)
- for currcls in cls.__mro__:
- if propname in currcls.__dict__:
- origfn = currcls.__dict__[propname].func
- assert callable(origfn)
- def wrap(*args, **kwargs):
- return wrapper(origfn, *args, **kwargs)
- currcls.__dict__[propname].func = wrap
- break
-
- if currcls is object:
- raise AttributeError(
- _("type '%s' has no property '%s'") % (cls, propname))
--- a/hgext/fsmonitor/state.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/fsmonitor/state.py Tue Jun 20 16:33:46 2017 -0400
@@ -13,7 +13,10 @@
import struct
from mercurial.i18n import _
-from mercurial import pathutil
+from mercurial import (
+ pathutil,
+ util,
+)
_version = 4
_versionformat = ">I"
@@ -24,6 +27,7 @@
self._ui = repo.ui
self._rootdir = pathutil.normasprefix(repo.root)
self._lastclock = None
+ self._identity = util.filestat(None)
self.mode = self._ui.config('fsmonitor', 'mode', default='on')
self.walk_on_invalidate = self._ui.configbool(
@@ -35,10 +39,13 @@
try:
file = self._vfs('fsmonitor.state', 'rb')
except IOError as inst:
+ self._identity = util.filestat(None)
if inst.errno != errno.ENOENT:
raise
return None, None, None
+ self._identity = util.filestat.fromfp(file)
+
versionbytes = file.read(4)
if len(versionbytes) < 4:
self._ui.log(
@@ -90,8 +97,16 @@
self.invalidate()
return
+ # Read the identity from the file on disk rather than from the open file
+ # pointer below, because the latter is actually a brand new file.
+ identity = util.filestat.frompath(self._vfs.join('fsmonitor.state'))
+ if identity != self._identity:
+ self._ui.debug('skip updating fsmonitor.state: identity mismatch\n')
+ return
+
try:
- file = self._vfs('fsmonitor.state', 'wb', atomictemp=True)
+ file = self._vfs('fsmonitor.state', 'wb', atomictemp=True,
+ checkambig=True)
except (IOError, OSError):
self._ui.warn(_("warning: unable to write out fsmonitor state\n"))
return
@@ -111,6 +126,7 @@
except OSError as inst:
if inst.errno != errno.ENOENT:
raise
+ self._identity = util.filestat(None)
def setlastclock(self, clock):
self._lastclock = clock
--- a/hgext/gpg.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/gpg.py Tue Jun 20 16:33:46 2017 -0400
@@ -14,16 +14,16 @@
from mercurial.i18n import _
from mercurial import (
cmdutil,
- commands,
error,
match,
node as hgnode,
pycompat,
+ registrar,
util,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -221,7 +221,7 @@
('m', 'message', '',
_('use text as commit message'), _('TEXT')),
('e', 'edit', False, _('invoke editor on commit messages')),
- ] + commands.commitopts2,
+ ] + cmdutil.commitopts2,
_('hg sign [OPTION]... [REV]...'))
def sign(ui, repo, *revs, **opts):
"""add a signature for the current or given revision
--- a/hgext/graphlog.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/graphlog.py Tue Jun 20 16:33:46 2017 -0400
@@ -21,10 +21,11 @@
from mercurial import (
cmdutil,
commands,
+ registrar,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -51,7 +52,7 @@
_('show changesets within the given named branch'), _('BRANCH')),
('P', 'prune', [],
_('do not display revision or any of its ancestors'), _('REV')),
- ] + commands.logopts + commands.walkopts,
+ ] + cmdutil.logopts + cmdutil.walkopts,
_('[OPTION]... [FILE]'),
inferrepo=True)
def glog(ui, repo, *pats, **opts):
--- a/hgext/hgk.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/hgk.py Tue Jun 20 16:33:46 2017 -0400
@@ -45,15 +45,15 @@
short,
)
from mercurial import (
- cmdutil,
commands,
obsolete,
patch,
+ registrar,
scmutil,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
--- a/hgext/highlight/highlight.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/highlight/highlight.py Tue Jun 20 16:33:46 2017 -0400
@@ -10,11 +10,6 @@
from __future__ import absolute_import
-import pygments
-import pygments.formatters
-import pygments.lexers
-import pygments.util
-
from mercurial import demandimport
demandimport.ignore.extend(['pkgutil', 'pkg_resources', '__main__'])
@@ -23,6 +18,12 @@
util,
)
+with demandimport.deactivated():
+ import pygments
+ import pygments.formatters
+ import pygments.lexers
+ import pygments.util
+
highlight = pygments.highlight
ClassNotFound = pygments.util.ClassNotFound
guess_lexer = pygments.lexers.guess_lexer
--- a/hgext/histedit.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/histedit.py Tue Jun 20 16:33:46 2017 -0400
@@ -201,6 +201,7 @@
mergeutil,
node,
obsolete,
+ registrar,
repair,
scmutil,
util,
@@ -209,7 +210,7 @@
pickle = util.pickle
release = lock.release
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -1198,8 +1199,8 @@
gen = exchange.readbundle(ui, f, backupfile)
with repo.transaction('histedit.abort') as tr:
if not isinstance(gen, bundle2.unbundle20):
- gen.apply(repo, 'histedit', 'bundle:' + backupfile)
- if isinstance(gen, bundle2.unbundle20):
+ gen.apply(repo, tr, 'histedit', 'bundle:' + backupfile)
+ else:
bundle2.applybundle(repo, gen, tr,
source='histedit',
url='bundle:' + backupfile)
@@ -1631,7 +1632,7 @@
key=repo.changelog.rev)
markers = [getmarker(t) for t in sortednodes]
if markers:
- obsolete.createmarkers(repo, markers)
+ obsolete.createmarkers(repo, markers, operation='histedit')
else:
return cleanupnode(ui, repo, name, nodes)
@@ -1641,8 +1642,8 @@
if os.path.exists(os.path.join(repo.path, 'histedit-state')):
state = histeditstate(repo)
state.read()
- histedit_nodes = set([action.node for action
- in state.actions if action.node])
+ histedit_nodes = {action.node for action
+ in state.actions if action.node}
common_nodes = histedit_nodes & set(nodelist)
if common_nodes:
raise error.Abort(_("histedit in progress, can't strip %s")
--- a/hgext/journal.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/journal.py Tue Jun 20 16:33:46 2017 -0400
@@ -23,7 +23,6 @@
from mercurial import (
bookmarks,
cmdutil,
- commands,
dispatch,
error,
extensions,
@@ -31,13 +30,14 @@
localrepo,
lock,
node,
+ registrar,
util,
)
from . import share
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -158,7 +158,7 @@
util.safehasattr(repo, 'journal')):
sharedrepo = share._getsrcrepo(repo)
sharedfeatures = _readsharedfeatures(repo)
- if sharedrepo and sharedfeatures > set(['journal']):
+ if sharedrepo and sharedfeatures > {'journal'}:
# there is a shared repository and there are shared journal entries
# to copy. move shared date over from source to destination but
# move the local file first
@@ -420,7 +420,7 @@
'journal', [
('', 'all', None, 'show history for all names'),
('c', 'commits', None, 'show commit metadata'),
- ] + [opt for opt in commands.logopts if opt[1] not in _ignoreopts],
+ ] + [opt for opt in cmdutil.logopts if opt[1] not in _ignoreopts],
'[OPTION]... [BOOKMARKNAME]')
def journal(ui, repo, *args, **opts):
"""show the previous position of bookmarks and the working copy
--- a/hgext/keyword.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/keyword.py Tue Jun 20 16:33:46 2017 -0400
@@ -94,7 +94,6 @@
from mercurial import (
cmdutil,
- commands,
context,
dispatch,
error,
@@ -111,7 +110,7 @@
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -240,8 +239,8 @@
'''Replaces keywords in data with expanded template.'''
def kwsub(mobj):
kw = mobj.group(1)
- ct = cmdutil.changeset_templater(self.ui, self.repo, False, None,
- self.templates[kw], '', False)
+ ct = cmdutil.makelogtemplater(self.ui, self.repo,
+ self.templates[kw])
self.ui.pushbuffer()
ct.show(ctx, root=self.repo.root, file=path)
ekw = templatefilters.firstline(self.ui.popbuffer())
@@ -481,7 +480,7 @@
repo.wvfs.rmtree(repo.root)
@command('kwexpand',
- commands.walkopts,
+ cmdutil.walkopts,
_('hg kwexpand [OPTION]... [FILE]...'),
inferrepo=True)
def expand(ui, repo, *pats, **opts):
@@ -498,7 +497,7 @@
[('A', 'all', None, _('show keyword status flags of all files')),
('i', 'ignore', None, _('show files excluded from expansion')),
('u', 'unknown', None, _('only show unknown (not tracked) files')),
- ] + commands.walkopts,
+ ] + cmdutil.walkopts,
_('hg kwfiles [OPTION]... [FILE]...'),
inferrepo=True)
def files(ui, repo, *pats, **opts):
@@ -557,7 +556,7 @@
fm.end()
@command('kwshrink',
- commands.walkopts,
+ cmdutil.walkopts,
_('hg kwshrink [OPTION]... [FILE]...'),
inferrepo=True)
def shrink(ui, repo, *pats, **opts):
@@ -640,22 +639,21 @@
return n
def rollback(self, dryrun=False, force=False):
- wlock = self.wlock()
- origrestrict = kwt.restrict
- try:
- if not dryrun:
- changed = self['.'].files()
- ret = super(kwrepo, self).rollback(dryrun, force)
- if not dryrun:
- ctx = self['.']
- modified, added = _preselect(ctx.status(), changed)
- kwt.restrict = False
- kwt.overwrite(ctx, modified, True, True)
- kwt.overwrite(ctx, added, True, False)
- return ret
- finally:
- kwt.restrict = origrestrict
- wlock.release()
+ with self.wlock():
+ origrestrict = kwt.restrict
+ try:
+ if not dryrun:
+ changed = self['.'].files()
+ ret = super(kwrepo, self).rollback(dryrun, force)
+ if not dryrun:
+ ctx = self['.']
+ modified, added = _preselect(ctx.status(), changed)
+ kwt.restrict = False
+ kwt.overwrite(ctx, modified, True, True)
+ kwt.overwrite(ctx, added, True, False)
+ return ret
+ finally:
+ kwt.restrict = origrestrict
# monkeypatches
def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
--- a/hgext/largefiles/__init__.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/largefiles/__init__.py Tue Jun 20 16:33:46 2017 -0400
@@ -129,7 +129,7 @@
def featuresetup(ui, supported):
# don't die on seeing a repo with the largefiles requirement
- supported |= set(['largefiles'])
+ supported |= {'largefiles'}
def uisetup(ui):
localrepo.localrepository.featuresetupfuncs.add(featuresetup)
--- a/hgext/largefiles/lfcommands.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/largefiles/lfcommands.py Tue Jun 20 16:33:46 2017 -0400
@@ -18,13 +18,13 @@
from mercurial import (
cmdutil,
- commands,
context,
error,
hg,
lock,
match as matchmod,
node,
+ registrar,
scmutil,
util,
)
@@ -44,7 +44,7 @@
# -- Commands ----------------------------------------------------------
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
@command('lfconvert',
[('s', 'size', '',
@@ -541,7 +541,7 @@
@command('lfpull',
[('r', 'rev', [], _('pull largefiles for these revisions'))
- ] + commands.remoteopts,
+ ] + cmdutil.remoteopts,
_('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def lfpull(ui, repo, source="default", **opts):
"""pull largefiles for the specified revisions from the specified source
--- a/hgext/largefiles/lfutil.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/largefiles/lfutil.py Tue Jun 20 16:33:46 2017 -0400
@@ -296,8 +296,6 @@
if not pats:
pats = [wvfs.join(standindir)]
match = scmutil.match(repo[None], pats, badfn=badfn)
- # if pats is empty, it would incorrectly always match, so clear _always
- match._always = False
else:
# no patterns: relative to repo root
match = scmutil.match(repo[None], [wvfs.join(standindir)], badfn=badfn)
--- a/hgext/largefiles/overrides.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/largefiles/overrides.py Tue Jun 20 16:33:46 2017 -0400
@@ -41,8 +41,8 @@
m = copy.copy(match)
lfile = lambda f: lfutil.standin(f) in manifest
m._files = filter(lfile, m._files)
- m._fileroots = set(m._files)
- m._always = False
+ m._fileset = set(m._files)
+ m.always = lambda: False
origmatchfn = m.matchfn
m.matchfn = lambda f: lfile(f) and origmatchfn(f)
return m
@@ -56,8 +56,8 @@
notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
manifest or f in excluded)
m._files = filter(notlfile, m._files)
- m._fileroots = set(m._files)
- m._always = False
+ m._fileset = set(m._files)
+ m.always = lambda: False
origmatchfn = m.matchfn
m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
return m
@@ -105,9 +105,9 @@
scmutil.matchandpats)
def addlargefiles(ui, repo, isaddremove, matcher, **opts):
- large = opts.get('large')
+ large = opts.get(r'large')
lfsize = lfutil.getminsize(
- ui, lfutil.islfilesrepo(repo), opts.get('lfsize'))
+ ui, lfutil.islfilesrepo(repo), opts.get(r'lfsize'))
lfmatcher = None
if lfutil.islfilesrepo(repo):
@@ -119,7 +119,7 @@
m = matcher
wctx = repo[None]
- for f in repo.walk(matchmod.badmatch(m, lambda x, y: None)):
+ for f in wctx.walk(matchmod.badmatch(m, lambda x, y: None)):
exact = m.exact(f)
lfile = lfutil.standin(f) in wctx
nfile = f in wctx
@@ -258,7 +258,7 @@
def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts):
# The --normal flag short circuits this override
- if opts.get('normal'):
+ if opts.get(r'normal'):
return orig(ui, repo, matcher, prefix, explicitonly, **opts)
ladded, lbad = addlargefiles(ui, repo, False, matcher, **opts)
@@ -347,14 +347,12 @@
else:
f = m._cwd + '/' + f
return back + lfutil.standin(f)
-
- pats.update(fixpats(f, tostandin) for f in p)
else:
def tostandin(f):
if lfutil.isstandin(f):
return f
return lfutil.standin(f)
- pats.update(fixpats(f, tostandin) for f in p)
+ pats.update(fixpats(f, tostandin) for f in p)
for i in range(0, len(m._files)):
# Don't add '.hglf' to m.files, since that is already covered by '.'
@@ -370,8 +368,8 @@
elif m._files[i] not in ctx and repo.wvfs.isdir(standin):
m._files.append(standin)
- m._fileroots = set(m._files)
- m._always = False
+ m._fileset = set(m._files)
+ m.always = lambda: False
origmatchfn = m.matchfn
def lfmatchfn(f):
lf = lfutil.splitstandin(f)
@@ -381,7 +379,7 @@
return r
m.matchfn = lfmatchfn
- ui.debug('updated patterns: %s\n' % sorted(pats))
+ ui.debug('updated patterns: %s\n' % ', '.join(sorted(pats)))
return m, pats
# For hg log --patch, the match object is used in two different senses:
@@ -646,7 +644,7 @@
m = copy.copy(match)
lfile = lambda f: lfutil.standin(f) in manifest
m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
- m._fileroots = set(m._files)
+ m._fileset = set(m._files)
origmatchfn = m.matchfn
def matchfn(f):
lfile = lfutil.splitstandin(f)
@@ -769,7 +767,7 @@
else:
matchfiles.append(f)
m._files = matchfiles
- m._fileroots = set(m._files)
+ m._fileset = set(m._files)
origmatchfn = m.matchfn
def matchfn(f):
lfile = lfutil.splitstandin(f)
--- a/hgext/largefiles/uisetup.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/largefiles/uisetup.py Tue Jun 20 16:33:46 2017 -0400
@@ -21,7 +21,6 @@
cmdutil,
commands,
copies,
- debugcommands,
exchange,
extensions,
filemerge,
@@ -40,11 +39,6 @@
)
def uisetup(ui):
- # TODO: debugcommands should use a separate command table
- # Side-effect of accessing is debugcommands module is guaranteed to be
- # imported and commands.table is populated.
- debugcommands.command
-
# Disable auto-status for some commands which assume that all
# files in the result are under Mercurial's control
--- a/hgext/mq.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/mq.py Tue Jun 20 16:33:46 2017 -0400
@@ -101,7 +101,7 @@
seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -932,14 +932,13 @@
merged.append(f)
else:
removed.append(f)
- repo.dirstate.beginparentchange()
- for f in removed:
- repo.dirstate.remove(f)
- for f in merged:
- repo.dirstate.merge(f)
- p1, p2 = repo.dirstate.parents()
- repo.setparents(p1, merge)
- repo.dirstate.endparentchange()
+ with repo.dirstate.parentchange():
+ for f in removed:
+ repo.dirstate.remove(f)
+ for f in merged:
+ repo.dirstate.merge(f)
+ p1, p2 = repo.dirstate.parents()
+ repo.setparents(p1, merge)
if all_files and '.hgsubstate' in all_files:
wctx = repo[None]
@@ -1580,16 +1579,15 @@
if keepchanges and tobackup:
raise error.Abort(_("local changes found, qrefresh first"))
self.backup(repo, tobackup)
- repo.dirstate.beginparentchange()
- for f in a:
- repo.wvfs.unlinkpath(f, ignoremissing=True)
- repo.dirstate.drop(f)
- for f in m + r:
- fctx = ctx[f]
- repo.wwrite(f, fctx.data(), fctx.flags())
- repo.dirstate.normal(f)
- repo.setparents(qp, nullid)
- repo.dirstate.endparentchange()
+ with repo.dirstate.parentchange():
+ for f in a:
+ repo.wvfs.unlinkpath(f, ignoremissing=True)
+ repo.dirstate.drop(f)
+ for f in m + r:
+ fctx = ctx[f]
+ repo.wwrite(f, fctx.data(), fctx.flags())
+ repo.dirstate.normal(f)
+ repo.setparents(qp, nullid)
for patch in reversed(self.applied[start:end]):
self.ui.status(_("popping %s\n") % patch.name)
del self.applied[start:end]
@@ -2409,7 +2407,7 @@
_('use uncompressed transfer (fast over LAN)')),
('p', 'patches', '',
_('location of source patch repository'), _('REPO')),
- ] + commands.remoteopts,
+ ] + cmdutil.remoteopts,
_('hg qclone [OPTION]... SOURCE [DEST]'),
norepo=True)
def clone(ui, source, dest=None, **opts):
@@ -2577,7 +2575,7 @@
('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
('d', 'date', '',
_('add "Date: <DATE>" to patch'), _('DATE'))
- ] + commands.walkopts + commands.commitopts,
+ ] + cmdutil.walkopts + cmdutil.commitopts,
_('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
inferrepo=True)
def new(ui, repo, patch, *args, **opts):
@@ -2626,7 +2624,7 @@
_('add/update date field in patch with current date')),
('d', 'date', '',
_('add/update date field in patch with given date'), _('DATE'))
- ] + commands.walkopts + commands.commitopts,
+ ] + cmdutil.walkopts + cmdutil.commitopts,
_('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
inferrepo=True)
def refresh(ui, repo, *pats, **opts):
@@ -2659,7 +2657,7 @@
return ret
@command("^qdiff",
- commands.diffopts + commands.diffopts2 + commands.walkopts,
+ cmdutil.diffopts + cmdutil.diffopts2 + cmdutil.walkopts,
_('hg qdiff [OPTION]... [FILE]...'),
inferrepo=True)
def diff(ui, repo, *pats, **opts):
@@ -2684,7 +2682,7 @@
@command('qfold',
[('e', 'edit', None, _('invoke editor on commit messages')),
('k', 'keep', None, _('keep folded patch files')),
- ] + commands.commitopts,
+ ] + cmdutil.commitopts,
_('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'))
def fold(ui, repo, *files, **opts):
"""fold the named patches into the current patch
@@ -3046,7 +3044,7 @@
('n', 'name', '',
_('copy directory name'), _('NAME')),
('e', 'empty', None, _('clear queue status file')),
- ('f', 'force', None, _('force copy'))] + commands.commitopts,
+ ('f', 'force', None, _('force copy'))] + cmdutil.commitopts,
_('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'))
def save(ui, repo, **opts):
"""save current queue state (DEPRECATED)
@@ -3540,7 +3538,7 @@
"""Add --mq option to operate on patch repository instead of main"""
# some commands do not like getting unknown options
- mq = kwargs.pop('mq', None)
+ mq = kwargs.pop(r'mq', None)
if not mq:
return orig(ui, repo, *args, **kwargs)
--- a/hgext/notify.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/notify.py Tue Jun 20 16:33:46 2017 -0400
@@ -203,8 +203,9 @@
mapfile = self.ui.config('notify', 'style')
if not mapfile and not template:
template = deftemplates.get(hooktype) or single_template
- self.t = cmdutil.changeset_templater(self.ui, self.repo, False, None,
- template, mapfile, False)
+ spec = cmdutil.logtemplatespec(template, mapfile)
+ self.t = cmdutil.changeset_templater(self.ui, self.repo, spec,
+ False, None, False)
def strip(self, path):
'''strip leading slashes from local path, turn into web-safe path.'''
--- a/hgext/patchbomb.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/patchbomb.py Tue Jun 20 16:33:46 2017 -0400
@@ -89,6 +89,8 @@
mail,
node as nodemod,
patch,
+ registrar,
+ repair,
scmutil,
templater,
util,
@@ -96,7 +98,7 @@
stringio = util.stringio
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -110,10 +112,10 @@
# experimental config: patchbomb.publicurl
# waiting for some logic that check that the changeset are available on the
# destination before patchbombing anything.
- pullurl = repo.ui.config('patchbomb', 'publicurl')
- if pullurl is not None:
+ publicurl = repo.ui.config('patchbomb', 'publicurl')
+ if publicurl:
return ('Available At %s\n'
- '# hg pull %s -r %s' % (pullurl, pullurl, ctx))
+ '# hg pull %s -r %s' % (publicurl, publicurl, ctx))
return None
def uisetup(ui):
@@ -441,6 +443,7 @@
('o', 'outgoing', None,
_('send changes not found in the target repository')),
('b', 'bundle', None, _('send changes not in target as a binary bundle')),
+ ('B', 'bookmark', '', _('send changes only reachable by given bookmark')),
('', 'bundlename', 'bundle',
_('name of the bundle attachment file'), _('NAME')),
('r', 'rev', [], _('a revision to send'), _('REV')),
@@ -449,7 +452,7 @@
('', 'base', [], _('a base changeset to specify instead of a destination '
'(with -b/--bundle)'), _('REV')),
('', 'intro', None, _('send an introduction email for a single patch')),
- ] + emailopts + commands.remoteopts,
+ ] + emailopts + cmdutil.remoteopts,
_('hg email [OPTION]... [DEST]...'))
def email(ui, repo, *revs, **opts):
'''send changesets by email
@@ -479,6 +482,9 @@
body and as a regular or an inline attachment by combining the
-a/--attach or -i/--inline with the --body option.
+ With -B/--bookmark changesets reachable by the given bookmark are
+ selected.
+
With -o/--outgoing, emails will be generated for patches not found
in the destination repository (or only those which are ancestors
of the specified revisions if any are provided)
@@ -517,6 +523,8 @@
hg email -o -r 3000 # send all ancestors of 3000 not in default
hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
+ hg email -B feature # send all ancestors of feature bookmark
+
hg email -b # send bundle of all patches not in default
hg email -b DEST # send bundle of all patches not in DEST
hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
@@ -539,17 +547,20 @@
mbox = opts.get('mbox')
outgoing = opts.get('outgoing')
rev = opts.get('rev')
+ bookmark = opts.get('bookmark')
if not (opts.get('test') or mbox):
# really sending
mail.validateconfig(ui)
- if not (revs or rev or outgoing or bundle):
- raise error.Abort(_('specify at least one changeset with -r or -o'))
+ if not (revs or rev or outgoing or bundle or bookmark):
+ raise error.Abort(_('specify at least one changeset with -B, -r or -o'))
if outgoing and bundle:
raise error.Abort(_("--outgoing mode always on with --bundle;"
" do not re-specify --outgoing"))
+ if rev and bookmark:
+ raise error.Abort(_("-r and -B are mutually exclusive"))
if outgoing or bundle:
if len(revs) > 1:
@@ -564,6 +575,10 @@
if revs:
raise error.Abort(_('use only one form to specify the revision'))
revs = rev
+ elif bookmark:
+ if bookmark not in repo._bookmarks:
+ raise error.Abort(_("bookmark '%s' not found") % bookmark)
+ revs = repair.stripbmrevset(repo, bookmark)
revs = scmutil.revrange(repo, revs)
if outgoing:
@@ -573,7 +588,7 @@
# check if revision exist on the public destination
publicurl = repo.ui.config('patchbomb', 'publicurl')
- if publicurl is not None:
+ if publicurl:
repo.ui.debug('checking that revision exist in the public repo')
try:
publicpeer = hg.peer(repo, {}, publicurl)
@@ -645,15 +660,17 @@
if addr:
showaddrs.append('%s: %s' % (header, addr))
return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
- else:
- return default
+ elif default:
+ return mail.addrlistencode(
+ ui, [default], _charsets, opts.get('test'))
+ return []
to = getaddrs('To', ask=True)
if not to:
# we can get here in non-interactive mode
raise error.Abort(_('no recipient addresses provided'))
- cc = getaddrs('Cc', ask=True, default='') or []
- bcc = getaddrs('Bcc') or []
+ cc = getaddrs('Cc', ask=True, default='')
+ bcc = getaddrs('Bcc')
replyto = getaddrs('Reply-To')
confirm = ui.configbool('patchbomb', 'confirm')
--- a/hgext/purge.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/purge.py Tue Jun 20 16:33:46 2017 -0400
@@ -30,14 +30,14 @@
from mercurial.i18n import _
from mercurial import (
cmdutil,
- commands,
error,
+ registrar,
scmutil,
util,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -52,7 +52,7 @@
('p', 'print', None, _('print filenames instead of deleting them')),
('0', 'print0', None, _('end filenames with NUL, for use with xargs'
' (implies -p/--print)')),
- ] + commands.walkopts,
+ ] + cmdutil.walkopts,
_('hg purge [OPTION]... [DIR]...'))
def purge(ui, repo, *dirs, **opts):
'''removes files not tracked by Mercurial
--- a/hgext/rebase.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/rebase.py Tue Jun 20 16:33:46 2017 -0400
@@ -52,7 +52,7 @@
)
release = lock.release
-templateopts = commands.templateopts
+templateopts = cmdutil.templateopts
# The following constants are used throughout the rebase module. The ordering of
# their values must be maintained.
@@ -68,7 +68,7 @@
revskipped = (revignored, revprecursor, revpruned)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -139,9 +139,9 @@
self.state = {}
self.activebookmark = None
self.currentbookmarks = None
- self.target = None
+ self.dest = None
self.skipped = set()
- self.targetancestors = set()
+ self.destancestors = set()
self.collapsef = opts.get('collapse', False)
self.collapsemsg = cmdutil.logmessage(ui, opts)
@@ -171,7 +171,7 @@
def _writestatus(self, f):
repo = self.repo.unfiltered()
f.write(repo[self.originalwd].hex() + '\n')
- f.write(repo[self.target].hex() + '\n')
+ f.write(repo[self.dest].hex() + '\n')
f.write(repo[self.external].hex() + '\n')
f.write('%d\n' % int(self.collapsef))
f.write('%d\n' % int(self.keepf))
@@ -194,7 +194,7 @@
"""Restore a previously stored status"""
repo = self.repo
keepbranches = None
- target = None
+ dest = None
collapse = False
external = nullrev
activebookmark = None
@@ -206,7 +206,7 @@
if i == 0:
originalwd = repo[l].rev()
elif i == 1:
- target = repo[l].rev()
+ dest = repo[l].rev()
elif i == 2:
external = repo[l].rev()
elif i == 3:
@@ -242,7 +242,7 @@
skipped = set()
# recompute the set of skipped revs
if not collapse:
- seen = set([target])
+ seen = {dest}
for old, new in sorted(state.items()):
if new != revtodo and new in seen:
skipped.add(old)
@@ -250,10 +250,10 @@
repo.ui.debug('computed skipped revs: %s\n' %
(' '.join(str(r) for r in sorted(skipped)) or None))
repo.ui.debug('rebase status resumed\n')
- _setrebasesetvisibility(repo, set(state.keys()) | set([originalwd]))
+ _setrebasesetvisibility(repo, set(state.keys()) | {originalwd})
self.originalwd = originalwd
- self.target = target
+ self.dest = dest
self.state = state
self.skipped = skipped
self.collapsef = collapse
@@ -262,12 +262,12 @@
self.external = external
self.activebookmark = activebookmark
- def _handleskippingobsolete(self, rebaserevs, obsoleterevs, target):
+ def _handleskippingobsolete(self, rebaserevs, obsoleterevs, dest):
"""Compute structures necessary for skipping obsolete revisions
rebaserevs: iterable of all revisions that are to be rebased
obsoleterevs: iterable of all obsolete revisions in rebaseset
- target: a destination revision for the rebase operation
+ dest: a destination revision for the rebase operation
"""
self.obsoletenotrebased = {}
if not self.ui.configbool('experimental', 'rebaseskipobsolete',
@@ -276,7 +276,7 @@
rebaseset = set(rebaserevs)
obsoleteset = set(obsoleterevs)
self.obsoletenotrebased = _computeobsoletenotrebased(self.repo,
- obsoleteset, target)
+ obsoleteset, dest)
skippedset = set(self.obsoletenotrebased)
_checkobsrebase(self.repo, self.ui, obsoleteset, rebaseset, skippedset)
@@ -296,11 +296,11 @@
hint = _('use "hg rebase --abort" to clear broken state')
raise error.Abort(msg, hint=hint)
if isabort:
- return abort(self.repo, self.originalwd, self.target,
+ return abort(self.repo, self.originalwd, self.dest,
self.state, activebookmark=self.activebookmark)
obsrevs = (r for r, st in self.state.items() if st == revprecursor)
- self._handleskippingobsolete(self.state.keys(), obsrevs, self.target)
+ self._handleskippingobsolete(self.state.keys(), obsrevs, self.dest)
def _preparenewrebase(self, dest, rebaseset):
if dest is None:
@@ -332,13 +332,13 @@
% root,
hint=_("see 'hg help phases' for details"))
- (self.originalwd, self.target, self.state) = result
+ (self.originalwd, self.dest, self.state) = result
if self.collapsef:
- self.targetancestors = self.repo.changelog.ancestors(
- [self.target],
+ self.destancestors = self.repo.changelog.ancestors(
+ [self.dest],
inclusive=True)
self.external = externalparent(self.repo, self.state,
- self.targetancestors)
+ self.destancestors)
if dest.closesbranch() and not self.keepbranchesf:
self.ui.status(_('reopening closed branch head %s\n') % dest)
@@ -359,9 +359,9 @@
'branches'))
# Rebase
- if not self.targetancestors:
- self.targetancestors = repo.changelog.ancestors([self.target],
- inclusive=True)
+ if not self.destancestors:
+ self.destancestors = repo.changelog.ancestors([self.dest],
+ inclusive=True)
# Keep track of the current bookmarks in order to reset them later
self.currentbookmarks = repo._bookmarks.copy()
@@ -384,14 +384,16 @@
names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
if names:
desc += ' (%s)' % ' '.join(names)
- if self.state[rev] == revtodo:
+ if self.state[rev] == rev:
+ ui.status(_('already rebased %s\n') % desc)
+ elif self.state[rev] == revtodo:
pos += 1
ui.status(_('rebasing %s\n') % desc)
ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, ctx)),
_('changesets'), total)
- p1, p2, base = defineparents(repo, rev, self.target,
+ p1, p2, base = defineparents(repo, rev, self.dest,
self.state,
- self.targetancestors,
+ self.destancestors,
self.obsoletenotrebased)
self.storestatus(tr=tr)
storecollapsemsg(repo, self.collapsemsg)
@@ -402,7 +404,7 @@
ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
'rebase')
stats = rebasenode(repo, rev, p1, base, self.state,
- self.collapsef, self.target)
+ self.collapsef, self.dest)
if stats and stats[3] > 0:
raise error.InterventionRequired(
_('unresolved conflicts (see hg '
@@ -418,11 +420,14 @@
editor=editor,
keepbranches=self.keepbranchesf,
date=self.date)
+ if newnode is None:
+ # If it ended up being a no-op commit, then the normal
+ # merge state clean-up path doesn't happen, so do it
+ # here. Fix issue5494
+ mergemod.mergestate.clean(repo)
else:
# Skip commit if we are collapsing
- repo.dirstate.beginparentchange()
repo.setparents(repo[p1].node())
- repo.dirstate.endparentchange()
newnode = None
# Update the state
if newnode is not None:
@@ -440,11 +445,11 @@
elif self.state[rev] == revignored:
ui.status(_('not rebasing ignored %s\n') % desc)
elif self.state[rev] == revprecursor:
- targetctx = repo[self.obsoletenotrebased[rev]]
- desctarget = '%d:%s "%s"' % (targetctx.rev(), targetctx,
- targetctx.description().split('\n', 1)[0])
+ destctx = repo[self.obsoletenotrebased[rev]]
+ descdest = '%d:%s "%s"' % (destctx.rev(), destctx,
+ destctx.description().split('\n', 1)[0])
msg = _('note: not rebasing %s, already in destination as %s\n')
- ui.status(msg % (desc, desctarget))
+ ui.status(msg % (desc, descdest))
elif self.state[rev] == revpruned:
msg = _('note: not rebasing %s, it has no successor\n')
ui.status(msg % desc)
@@ -459,8 +464,8 @@
repo, ui, opts = self.repo, self.ui, self.opts
if self.collapsef and not self.keepopen:
p1, p2, _base = defineparents(repo, min(self.state),
- self.target, self.state,
- self.targetancestors,
+ self.dest, self.state,
+ self.destancestors,
self.obsoletenotrebased)
editopt = opts.get('edit')
editform = 'rebase.collapse'
@@ -494,7 +499,7 @@
raise
if newnode is None:
- newrev = self.target
+ newrev = self.dest
else:
newrev = repo[newnode].rev()
for oldrev in self.state.iterkeys():
@@ -508,14 +513,14 @@
# Nodeids are needed to reset bookmarks
nstate = {}
for k, v in self.state.iteritems():
- if v > nullmerge:
+ if v > nullmerge and v != k:
nstate[repo[k].node()] = repo[v].node()
elif v == revprecursor:
succ = self.obsoletenotrebased[k]
nstate[repo[k].node()] = repo[succ].node()
# XXX this is the same as dest.node() for the non-continue path --
# this should probably be cleaned up
- targetnode = repo[self.target].node()
+ destnode = repo[self.dest].node()
# restore original working directory
# (we do this before stripping)
@@ -531,7 +536,7 @@
if self.currentbookmarks:
with repo.transaction('bookmark') as tr:
- updatebookmarks(repo, targetnode, nstate,
+ updatebookmarks(repo, destnode, nstate,
self.currentbookmarks, tr)
if self.activebookmark not in repo._bookmarks:
# active bookmark was divergent one and has been deleted
@@ -678,11 +683,7 @@
"""
rbsrt = rebaseruntime(repo, ui, opts)
- lock = wlock = None
- try:
- wlock = repo.wlock()
- lock = repo.lock()
-
+ with repo.wlock(), repo.lock():
# Validate input and define rebasing points
destf = opts.get('dest', None)
srcf = opts.get('source', None)
@@ -748,8 +749,6 @@
release(dsguard)
raise
rbsrt._finishrebase()
- finally:
- release(lock, wlock)
def _definesets(ui, repo, destf=None, srcf=None, basef=None, revf=None,
destspace=None):
@@ -846,9 +845,9 @@
return dest, rebaseset
-def externalparent(repo, state, targetancestors):
+def externalparent(repo, state, destancestors):
"""Return the revision that should be used as the second parent
- when the revisions in state is collapsed on top of targetancestors.
+ when the revisions in state are collapsed on top of destancestors.
Abort if there is more than one parent.
"""
parents = set()
@@ -858,7 +857,7 @@
continue
for p in repo[rev].parents():
if (p.rev() not in state
- and p.rev() not in targetancestors):
+ and p.rev() not in destancestors):
parents.add(p.rev())
if not parents:
return nullrev
@@ -866,7 +865,7 @@
return parents.pop()
raise error.Abort(_('unable to collapse on top of %s, there is more '
'than one external parent: %s') %
- (max(targetancestors),
+ (max(destancestors),
', '.join(str(p) for p in sorted(parents))))
def concludenode(repo, rev, p1, p2, commitmsg=None, editor=None, extrafn=None,
@@ -883,8 +882,8 @@
if extrafn:
extrafn(ctx, extra)
- targetphase = max(ctx.phase(), phases.draft)
- overrides = {('phases', 'new-commit'): targetphase}
+ destphase = max(ctx.phase(), phases.draft)
+ overrides = {('phases', 'new-commit'): destphase}
with repo.ui.configoverride(overrides, 'rebase'):
if keepbranch:
repo.ui.setconfig('ui', 'allowemptycommit', True)
@@ -897,15 +896,15 @@
repo.dirstate.setbranch(repo[newnode].branch())
return newnode
-def rebasenode(repo, rev, p1, base, state, collapse, target):
+def rebasenode(repo, rev, p1, base, state, collapse, dest):
'Rebase a single revision rev on top of p1 using base as merge ancestor'
# Merge phase
- # Update to target and merge it with local
+ # Update to destination and merge it with local
if repo['.'].rev() != p1:
repo.ui.debug(" update to %d:%s\n" % (p1, repo[p1]))
mergemod.update(repo, p1, False, True)
else:
- repo.ui.debug(" already in target\n")
+ repo.ui.debug(" already in destination\n")
repo.dirstate.write(repo.currenttransaction())
repo.ui.debug(" merge against %d:%s\n" % (rev, repo[rev]))
if base is not None:
@@ -915,7 +914,7 @@
stats = mergemod.update(repo, rev, True, True, base, collapse,
labels=['dest', 'source'])
if collapse:
- copies.duplicatecopies(repo, rev, target)
+ copies.duplicatecopies(repo, rev, dest)
else:
# If we're not using --collapse, we need to
# duplicate copies between the revision we're
@@ -923,7 +922,7 @@
# duplicate any copies that have already been
# performed in the destination.
p1rev = repo[rev].p1().rev()
- copies.duplicatecopies(repo, rev, p1rev, skiprev=target)
+ copies.duplicatecopies(repo, rev, p1rev, skiprev=dest)
return stats
def nearestrebased(repo, rev, state):
@@ -958,7 +957,7 @@
"experimental.allowdivergence=True")
raise error.Abort(msg % (",".join(divhashes),), hint=h)
-def defineparents(repo, rev, target, state, targetancestors,
+def defineparents(repo, rev, dest, state, destancestors,
obsoletenotrebased):
'Return the new parent relationship of the revision that will be rebased'
parents = repo[rev].parents()
@@ -966,26 +965,26 @@
rp1 = None
p1n = parents[0].rev()
- if p1n in targetancestors:
- p1 = target
+ if p1n in destancestors:
+ p1 = dest
elif p1n in state:
if state[p1n] == nullmerge:
- p1 = target
+ p1 = dest
elif state[p1n] in revskipped:
p1 = nearestrebased(repo, p1n, state)
if p1 is None:
- p1 = target
+ p1 = dest
else:
p1 = state[p1n]
else: # p1n external
- p1 = target
+ p1 = dest
p2 = p1n
- if len(parents) == 2 and parents[1].rev() not in targetancestors:
+ if len(parents) == 2 and parents[1].rev() not in destancestors:
p2n = parents[1].rev()
# interesting second parent
if p2n in state:
- if p1 == target: # p1n in targetancestors or external
+ if p1 == dest: # p1n in destancestors or external
p1 = state[p2n]
if p1 == revprecursor:
rp1 = obsoletenotrebased[p2n]
@@ -993,7 +992,7 @@
p2 = nearestrebased(repo, p2n, state)
if p2 is None:
# no ancestors rebased yet, detach
- p2 = target
+ p2 = dest
else:
p2 = state[p2n]
else: # p2n external
@@ -1109,14 +1108,14 @@
mq.seriesdirty = True
mq.savedirty()
-def updatebookmarks(repo, targetnode, nstate, originalbookmarks, tr):
+def updatebookmarks(repo, destnode, nstate, originalbookmarks, tr):
'Move bookmarks to their correct changesets, and delete divergent ones'
marks = repo._bookmarks
for k, v in originalbookmarks.iteritems():
if v in nstate:
# update the bookmarks for revs that have moved
marks[k] = nstate[v]
- bookmarks.deletedivergent(repo, [targetnode], k)
+ bookmarks.deletedivergent(repo, [destnode], k)
marks.recordchange(tr)
def storecollapsemsg(repo, collapsemsg):
@@ -1168,7 +1167,7 @@
return False
-def abort(repo, originalwd, target, state, activebookmark=None):
+def abort(repo, originalwd, dest, state, activebookmark=None):
'''Restore the repository to its original state. Additional args:
activebookmark: the name of the bookmark that should be active after the
@@ -1176,9 +1175,9 @@
try:
# If the first commits in the rebased set get skipped during the rebase,
- # their values within the state mapping will be the target rev id. The
- # dstates list must must not contain the target rev (issue4896)
- dstates = [s for s in state.values() if s >= 0 and s != target]
+ # their values within the state mapping will be the dest rev id. The
+ # dstates list must not contain the dest rev (issue4896)
+ dstates = [s for s in state.values() if s >= 0 and s != dest]
immutable = [d for d in dstates if not repo[d].mutable()]
cleanup = True
if immutable:
@@ -1191,19 +1190,19 @@
if dstates:
descendants = set(repo.changelog.descendants(dstates))
if descendants - set(dstates):
- repo.ui.warn(_("warning: new changesets detected on target branch, "
- "can't strip\n"))
+ repo.ui.warn(_("warning: new changesets detected on destination "
+ "branch, can't strip\n"))
cleanup = False
if cleanup:
shouldupdate = False
- rebased = filter(lambda x: x >= 0 and x != target, state.values())
+ rebased = filter(lambda x: x >= 0 and x != dest, state.values())
if rebased:
strippoints = [
c.node() for c in repo.set('roots(%ld)', rebased)]
updateifonnodes = set(rebased)
- updateifonnodes.add(target)
+ updateifonnodes.add(dest)
updateifonnodes.add(originalwd)
shouldupdate = repo['.'].rev() in updateifonnodes
@@ -1233,7 +1232,7 @@
rebaseset: set of rev
'''
originalwd = repo['.'].rev()
- _setrebasesetvisibility(repo, set(rebaseset) | set([originalwd]))
+ _setrebasesetvisibility(repo, set(rebaseset) | {originalwd})
# This check isn't strictly necessary, since mq detects commits over an
# applied patch. But it prevents messing up the working directory when
@@ -1246,8 +1245,9 @@
if not roots:
raise error.Abort(_('no matching revisions'))
roots.sort()
- state = {}
+ state = dict.fromkeys(rebaseset, revtodo)
detachset = set()
+ emptyrebase = True
for root in roots:
commonbase = root.ancestor(dest)
if commonbase == root:
@@ -1259,12 +1259,15 @@
samebranch = root.branch() == wctx.branch()
else:
samebranch = root.branch() == dest.branch()
- if not collapse and samebranch and root in dest.children():
+ if not collapse and samebranch and dest in root.parents():
+ # mark the revision as done by setting its new revision
+ # equal to its old (current) revisions
+ state[root.rev()] = root.rev()
repo.ui.debug('source is a child of destination\n')
- return None
+ continue
+ emptyrebase = False
repo.ui.debug('rebase onto %s starting from %s\n' % (dest, root))
- state.update(dict.fromkeys(rebaseset, revtodo))
# Rebase tries to turn <dest> into a parent of <root> while
# preserving the number of parents of rebased changesets:
#
@@ -1306,6 +1309,13 @@
# ancestors of <root> not ancestors of <dest>
detachset.update(repo.changelog.findmissingrevs([commonbase.rev()],
[root.rev()]))
+ if emptyrebase:
+ return None
+ for rev in sorted(state):
+ parents = [p for p in repo.changelog.parentrevs(rev) if p != nullrev]
+ # if all parents of this revision are done, then so is this revision
+ if parents and all((state.get(p) == p for p in parents)):
+ state[rev] = rev
for r in detachset:
if r not in state:
state[r] = nullmerge
@@ -1333,7 +1343,7 @@
if obsolete.isenabled(repo, obsolete.createmarkersopt):
markers = []
for rev, newrev in sorted(state.items()):
- if newrev >= 0:
+ if newrev >= 0 and newrev != rev:
if rev in skipped:
succs = ()
elif collapsedas is not None:
@@ -1342,9 +1352,10 @@
succs = (repo[newrev],)
markers.append((repo[rev], succs))
if markers:
- obsolete.createmarkers(repo, markers)
+ obsolete.createmarkers(repo, markers, operation='rebase')
else:
- rebased = [rev for rev in state if state[rev] > nullmerge]
+ rebased = [rev for rev in state
+ if state[rev] > nullmerge and state[rev] != rev]
if rebased:
stripped = []
for root in repo.set('roots(%ld)', rebased):
@@ -1367,10 +1378,7 @@
hint = _('use hg pull followed by hg rebase -d DEST')
raise error.Abort(msg, hint=hint)
- wlock = lock = None
- try:
- wlock = repo.wlock()
- lock = repo.lock()
+ with repo.wlock(), repo.lock():
if opts.get('update'):
del opts['update']
ui.debug('--update and --rebase are not compatible, ignoring '
@@ -1414,8 +1422,6 @@
# not passing argument to get the bare update behavior
# with warning and trumpets
commands.update(ui, repo)
- finally:
- release(lock, wlock)
else:
if opts.get('tool'):
raise error.Abort(_('--tool can only be used with --rebase'))
@@ -1519,4 +1525,4 @@
cmdutil.afterresolvedstates.append(
['rebasestate', _('hg rebase --continue')])
# ensure rebased rev are not hidden
- extensions.wrapfunction(repoview, '_getdynamicblockers', _rebasedvisible)
+ extensions.wrapfunction(repoview, 'pinnedrevs', _rebasedvisible)
--- a/hgext/record.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/record.py Tue Jun 20 16:33:46 2017 -0400
@@ -18,10 +18,11 @@
commands,
error,
extensions,
+ registrar,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -32,7 +33,7 @@
@command("record",
# same options as commit + white space diff options
[c for c in commands.table['^commit|ci'][1][:]
- if c[1] != "interactive"] + commands.diffwsopts,
+ if c[1] != "interactive"] + cmdutil.diffwsopts,
_('hg record [OPTION]... [FILE]...'))
def record(ui, repo, *pats, **opts):
'''interactively select changes to commit
@@ -135,7 +136,7 @@
(qrecord,
# same options as qnew, but copy them so we don't get
# -i/--interactive for qrecord and add white space diff options
- mq.cmdtable['^qnew'][1][:] + commands.diffwsopts,
+ mq.cmdtable['^qnew'][1][:] + cmdutil.diffwsopts,
_('hg qrecord [OPTION]... PATCH [FILE]...'))
_wrapcmd('qnew', mq.cmdtable, qnew, _("interactively record a new patch"))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/releasenotes.py Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,429 @@
+# Copyright 2017-present Gregory Szorc <gregory.szorc@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+"""generate release notes from commit messages (EXPERIMENTAL)
+
+It is common to maintain files detailing changes in a project between
+releases. Maintaining these files can be difficult and time consuming.
+The :hg:`releasenotes` command provided by this extension makes the
+process simpler by automating it.
+"""
+
+from __future__ import absolute_import
+
+import errno
+import re
+import sys
+import textwrap
+
+from mercurial.i18n import _
+from mercurial import (
+ error,
+ minirst,
+ registrar,
+ scmutil,
+)
+
+cmdtable = {}
+command = registrar.command(cmdtable)
+
+# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
+# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
+# be specifying the version(s) of Mercurial they are tested with, or
+# leave the attribute unspecified.
+testedwith = 'ships-with-hg-core'
+
+DEFAULT_SECTIONS = [
+ ('feature', _('New Features')),
+ ('bc', _('Backwards Compatibility Changes')),
+ ('fix', _('Bug Fixes')),
+ ('perf', _('Performance Improvements')),
+ ('api', _('API Changes')),
+]
+
+RE_DIRECTIVE = re.compile('^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$')
+
+BULLET_SECTION = _('Other Changes')
+
+class parsedreleasenotes(object):
+ def __init__(self):
+ self.sections = {}
+
+ def __contains__(self, section):
+ return section in self.sections
+
+ def __iter__(self):
+ return iter(sorted(self.sections))
+
+ def addtitleditem(self, section, title, paragraphs):
+ """Add a titled release note entry."""
+ self.sections.setdefault(section, ([], []))
+ self.sections[section][0].append((title, paragraphs))
+
+ def addnontitleditem(self, section, paragraphs):
+ """Adds a non-titled release note entry.
+
+ Will be rendered as a bullet point.
+ """
+ self.sections.setdefault(section, ([], []))
+ self.sections[section][1].append(paragraphs)
+
+ def titledforsection(self, section):
+ """Returns titled entries in a section.
+
+ Returns a list of (title, paragraphs) tuples describing sub-sections.
+ """
+ return self.sections.get(section, ([], []))[0]
+
+ def nontitledforsection(self, section):
+ """Returns non-titled, bulleted paragraphs in a section."""
+ return self.sections.get(section, ([], []))[1]
+
+ def hastitledinsection(self, section, title):
+ return any(t[0] == title for t in self.titledforsection(section))
+
+ def merge(self, ui, other):
+ """Merge another instance into this one.
+
+ This is used to combine multiple sources of release notes together.
+ """
+ for section in other:
+ for title, paragraphs in other.titledforsection(section):
+ if self.hastitledinsection(section, title):
+ # TODO prompt for resolution if different and running in
+ # interactive mode.
+ ui.write(_('%s already exists in %s section; ignoring\n') %
+ (title, section))
+ continue
+
+ # TODO perform similarity comparison and try to match against
+ # existing.
+ self.addtitleditem(section, title, paragraphs)
+
+ for paragraphs in other.nontitledforsection(section):
+ if paragraphs in self.nontitledforsection(section):
+ continue
+
+ # TODO perform similarity comparison and try to match against
+ # existing.
+ self.addnontitleditem(section, paragraphs)
+
+class releasenotessections(object):
+ def __init__(self, ui):
+ # TODO support defining custom sections from config.
+ self._sections = list(DEFAULT_SECTIONS)
+
+ def __iter__(self):
+ return iter(self._sections)
+
+ def names(self):
+ return [t[0] for t in self._sections]
+
+ def sectionfromtitle(self, title):
+ for name, value in self._sections:
+ if value == title:
+ return name
+
+ return None
+
+def parsenotesfromrevisions(repo, directives, revs):
+ notes = parsedreleasenotes()
+
+ for rev in revs:
+ ctx = repo[rev]
+
+ blocks, pruned = minirst.parse(ctx.description(),
+ admonitions=directives)
+
+ for i, block in enumerate(blocks):
+ if block['type'] != 'admonition':
+ continue
+
+ directive = block['admonitiontitle']
+ title = block['lines'][0].strip() if block['lines'] else None
+
+ if i + 1 == len(blocks):
+ raise error.Abort(_('release notes directive %s lacks content')
+ % directive)
+
+ # Now search ahead and find all paragraphs attached to this
+ # admonition.
+ paragraphs = []
+ for j in range(i + 1, len(blocks)):
+ pblock = blocks[j]
+
+ # Margin blocks may appear between paragraphs. Ignore them.
+ if pblock['type'] == 'margin':
+ continue
+
+ if pblock['type'] != 'paragraph':
+ raise error.Abort(_('unexpected block in release notes '
+ 'directive %s') % directive)
+
+ if pblock['indent'] > 0:
+ paragraphs.append(pblock['lines'])
+ else:
+ break
+
+ # TODO consider using title as paragraph for more concise notes.
+ if not paragraphs:
+ raise error.Abort(_('could not find content for release note '
+ '%s') % directive)
+
+ if title:
+ notes.addtitleditem(directive, title, paragraphs)
+ else:
+ notes.addnontitleditem(directive, paragraphs)
+
+ return notes
+
+def parsereleasenotesfile(sections, text):
+ """Parse text content containing generated release notes."""
+ notes = parsedreleasenotes()
+
+ blocks = minirst.parse(text)[0]
+
+ def gatherparagraphs(offset):
+ paragraphs = []
+
+ for i in range(offset + 1, len(blocks)):
+ block = blocks[i]
+
+ if block['type'] == 'margin':
+ continue
+ elif block['type'] == 'section':
+ break
+ elif block['type'] == 'bullet':
+ if block['indent'] != 0:
+ raise error.Abort(_('indented bullet lists not supported'))
+
+ lines = [l[1:].strip() for l in block['lines']]
+ paragraphs.append(lines)
+ continue
+ elif block['type'] != 'paragraph':
+ raise error.Abort(_('unexpected block type in release notes: '
+ '%s') % block['type'])
+
+ paragraphs.append(block['lines'])
+
+ return paragraphs
+
+ currentsection = None
+ for i, block in enumerate(blocks):
+ if block['type'] != 'section':
+ continue
+
+ title = block['lines'][0]
+
+ # TODO the parsing around paragraphs and bullet points needs some
+ # work.
+ if block['underline'] == '=': # main section
+ name = sections.sectionfromtitle(title)
+ if not name:
+ raise error.Abort(_('unknown release notes section: %s') %
+ title)
+
+ currentsection = name
+ paragraphs = gatherparagraphs(i)
+ if paragraphs:
+ notes.addnontitleditem(currentsection, paragraphs)
+
+ elif block['underline'] == '-': # sub-section
+ paragraphs = gatherparagraphs(i)
+
+ if title == BULLET_SECTION:
+ notes.addnontitleditem(currentsection, paragraphs)
+ else:
+ notes.addtitleditem(currentsection, title, paragraphs)
+ else:
+ raise error.Abort(_('unsupported section type for %s') % title)
+
+ return notes
+
+def serializenotes(sections, notes):
+ """Serialize release notes from parsed fragments and notes.
+
+ This function essentially takes the output of ``parsenotesfromrevisions()``
+ and ``parsereleasenotesfile()`` and produces output combining the 2.
+ """
+ lines = []
+
+ for sectionname, sectiontitle in sections:
+ if sectionname not in notes:
+ continue
+
+ lines.append(sectiontitle)
+ lines.append('=' * len(sectiontitle))
+ lines.append('')
+
+ # First pass to emit sub-sections.
+ for title, paragraphs in notes.titledforsection(sectionname):
+ lines.append(title)
+ lines.append('-' * len(title))
+ lines.append('')
+
+ wrapper = textwrap.TextWrapper(width=78)
+ for i, para in enumerate(paragraphs):
+ if i:
+ lines.append('')
+ lines.extend(wrapper.wrap(' '.join(para)))
+
+ lines.append('')
+
+ # Second pass to emit bullet list items.
+
+ # If the section has titled and non-titled items, we can't
+ # simply emit the bullet list because it would appear to come
+ # from the last title/section. So, we emit a new sub-section
+ # for the non-titled items.
+ nontitled = notes.nontitledforsection(sectionname)
+ if notes.titledforsection(sectionname) and nontitled:
+ # TODO make configurable.
+ lines.append(BULLET_SECTION)
+ lines.append('-' * len(BULLET_SECTION))
+ lines.append('')
+
+ for paragraphs in nontitled:
+ wrapper = textwrap.TextWrapper(initial_indent='* ',
+ subsequent_indent=' ',
+ width=78)
+ lines.extend(wrapper.wrap(' '.join(paragraphs[0])))
+
+ wrapper = textwrap.TextWrapper(initial_indent=' ',
+ subsequent_indent=' ',
+ width=78)
+ for para in paragraphs[1:]:
+ lines.append('')
+ lines.extend(wrapper.wrap(' '.join(para)))
+
+ lines.append('')
+
+ if lines[-1]:
+ lines.append('')
+
+ return '\n'.join(lines)
+
+@command('releasenotes',
+ [('r', 'rev', '', _('revisions to process for release notes'), _('REV'))],
+ _('[-r REV] FILE'))
+def releasenotes(ui, repo, file_, rev=None):
+ """parse release notes from commit messages into an output file
+
+ Given an output file and set of revisions, this command will parse commit
+ messages for release notes then add them to the output file.
+
+ Release notes are defined in commit messages as ReStructuredText
+ directives. These have the form::
+
+ .. directive:: title
+
+ content
+
+ Each ``directive`` maps to an output section in a generated release notes
+ file, which itself is ReStructuredText. For example, the ``.. feature::``
+ directive would map to a ``New Features`` section.
+
+ Release note directives can be either short-form or long-form. In short-
+ form, ``title`` is omitted and the release note is rendered as a bullet
+ list. In long form, a sub-section with the title ``title`` is added to the
+ section.
+
+ The ``FILE`` argument controls the output file to write gathered release
+ notes to. The format of the file is::
+
+ Section 1
+ =========
+
+ ...
+
+ Section 2
+ =========
+
+ ...
+
+ Only sections with defined release notes are emitted.
+
+ If a section only has short-form notes, it will consist of bullet list::
+
+ Section
+ =======
+
+ * Release note 1
+ * Release note 2
+
+ If a section has long-form notes, sub-sections will be emitted::
+
+ Section
+ =======
+
+ Note 1 Title
+ ------------
+
+ Description of the first long-form note.
+
+ Note 2 Title
+ ------------
+
+ Description of the second long-form note.
+
+ If the ``FILE`` argument points to an existing file, that file will be
+ parsed for release notes having the format that would be generated by this
+ command. The notes from the processed commit messages will be *merged*
+ into this parsed set.
+
+ During release notes merging:
+
+ * Duplicate items are automatically ignored
+ * Items that are different are automatically ignored if the similarity is
+ greater than a threshold.
+
+ This means that the release notes file can be updated independently from
+ this command and changes should not be lost when running this command on
+ that file. A particular use case for this is to tweak the wording of a
+ release note after it has been added to the release notes file.
+ """
+ sections = releasenotessections(ui)
+
+ revs = scmutil.revrange(repo, [rev or 'not public()'])
+ incoming = parsenotesfromrevisions(repo, sections.names(), revs)
+
+ try:
+ with open(file_, 'rb') as fh:
+ notes = parsereleasenotesfile(sections, fh.read())
+ except IOError as e:
+ if e.errno != errno.ENOENT:
+ raise
+
+ notes = parsedreleasenotes()
+
+ notes.merge(ui, incoming)
+
+ with open(file_, 'wb') as fh:
+ fh.write(serializenotes(sections, notes))
+
+@command('debugparsereleasenotes', norepo=True)
+def debugparsereleasenotes(ui, path):
+ """parse release notes and print resulting data structure"""
+ if path == '-':
+ text = sys.stdin.read()
+ else:
+ with open(path, 'rb') as fh:
+ text = fh.read()
+
+ sections = releasenotessections(ui)
+
+ notes = parsereleasenotesfile(sections, text)
+
+ for section in notes:
+ ui.write(_('section: %s\n') % section)
+ for title, paragraphs in notes.titledforsection(section):
+ ui.write(_(' subsection: %s\n') % title)
+ for para in paragraphs:
+ ui.write(_(' paragraph: %s\n') % ' '.join(para))
+
+ for paragraphs in notes.nontitledforsection(section):
+ ui.write(_(' bullet point:\n'))
+ for para in paragraphs:
+ ui.write(_(' paragraph: %s\n') % ' '.join(para))
--- a/hgext/relink.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/relink.py Tue Jun 20 16:33:46 2017 -0400
@@ -13,14 +13,14 @@
from mercurial.i18n import _
from mercurial import (
- cmdutil,
error,
hg,
+ registrar,
util,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
--- a/hgext/schemes.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/schemes.py Tue Jun 20 16:33:46 2017 -0400
@@ -46,17 +46,17 @@
from mercurial.i18n import _
from mercurial import (
- cmdutil,
error,
extensions,
hg,
pycompat,
+ registrar,
templater,
util,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
--- a/hgext/share.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/share.py Tue Jun 20 16:33:46 2017 -0400
@@ -43,11 +43,11 @@
from mercurial.i18n import _
from mercurial import (
bookmarks,
- cmdutil,
commands,
error,
extensions,
hg,
+ registrar,
txnutil,
util,
)
@@ -56,7 +56,7 @@
parseurl = hg.parseurl
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
--- a/hgext/shelve.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/shelve.py Tue Jun 20 16:33:46 2017 -0400
@@ -33,7 +33,6 @@
bundlerepo,
changegroup,
cmdutil,
- commands,
error,
exchange,
hg,
@@ -43,6 +42,7 @@
node as nodemod,
patch,
phases,
+ registrar,
repair,
scmutil,
templatefilters,
@@ -55,7 +55,7 @@
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -127,10 +127,10 @@
try:
gen = exchange.readbundle(self.repo.ui, fp, self.fname, self.vfs)
if not isinstance(gen, bundle2.unbundle20):
- gen.apply(self.repo, 'unshelve',
+ gen.apply(self.repo, self.repo.currenttransaction(), 'unshelve',
'bundle:' + self.vfs.join(self.fname),
targetphase=phases.secret)
- if isinstance(gen, bundle2.unbundle20):
+ else:
bundle2.applybundle(self.repo, gen,
self.repo.currenttransaction(),
source='unshelve',
@@ -167,7 +167,7 @@
Handles saving and restoring a shelved state. Ensures that different
versions of a shelved state are possible and handles them appropriately.
"""
- _version = 1
+ _version = 2
_filename = 'shelvedstate'
_keep = 'keep'
_nokeep = 'nokeep'
@@ -175,40 +175,75 @@
_noactivebook = ':no-active-bookmark'
@classmethod
- def load(cls, repo):
+ def _verifyandtransform(cls, d):
+ """Some basic shelvestate syntactic verification and transformation"""
+ try:
+ d['originalwctx'] = nodemod.bin(d['originalwctx'])
+ d['pendingctx'] = nodemod.bin(d['pendingctx'])
+ d['parents'] = [nodemod.bin(h)
+ for h in d['parents'].split(' ')]
+ d['nodestoremove'] = [nodemod.bin(h)
+ for h in d['nodestoremove'].split(' ')]
+ except (ValueError, TypeError, KeyError) as err:
+ raise error.CorruptedState(str(err))
+
+ @classmethod
+ def _getversion(cls, repo):
+ """Read version information from shelvestate file"""
fp = repo.vfs(cls._filename)
try:
version = int(fp.readline().strip())
-
- if version != cls._version:
- raise error.Abort(_('this version of shelve is incompatible '
- 'with the version used in this repo'))
- name = fp.readline().strip()
- wctx = nodemod.bin(fp.readline().strip())
- pendingctx = nodemod.bin(fp.readline().strip())
- parents = [nodemod.bin(h) for h in fp.readline().split()]
- nodestoremove = [nodemod.bin(h) for h in fp.readline().split()]
- branchtorestore = fp.readline().strip()
- keep = fp.readline().strip() == cls._keep
- activebook = fp.readline().strip()
- except (ValueError, TypeError) as err:
+ except ValueError as err:
raise error.CorruptedState(str(err))
finally:
fp.close()
+ return version
+ @classmethod
+ def _readold(cls, repo):
+ """Read the old position-based version of a shelvestate file"""
+ # Order is important, because old shelvestate file uses it
+ # to determine values of fields (e.g. name is on the second line,
+ # originalwctx is on the third and so forth). Please do not change.
+ keys = ['version', 'name', 'originalwctx', 'pendingctx', 'parents',
+ 'nodestoremove', 'branchtorestore', 'keep', 'activebook']
+ # this is executed only seldom, so it is not a big deal
+ # that we open this file twice
+ fp = repo.vfs(cls._filename)
+ d = {}
+ try:
+ for key in keys:
+ d[key] = fp.readline().strip()
+ finally:
+ fp.close()
+ return d
+
+ @classmethod
+ def load(cls, repo):
+ version = cls._getversion(repo)
+ if version < cls._version:
+ d = cls._readold(repo)
+ elif version == cls._version:
+ d = scmutil.simplekeyvaluefile(repo.vfs, cls._filename)\
+ .read(firstlinenonkeyval=True)
+ else:
+ raise error.Abort(_('this version of shelve is incompatible '
+ 'with the version used in this repo'))
+
+ cls._verifyandtransform(d)
try:
obj = cls()
- obj.name = name
- obj.wctx = repo[wctx]
- obj.pendingctx = repo[pendingctx]
- obj.parents = parents
- obj.nodestoremove = nodestoremove
- obj.branchtorestore = branchtorestore
- obj.keep = keep
+ obj.name = d['name']
+ obj.wctx = repo[d['originalwctx']]
+ obj.pendingctx = repo[d['pendingctx']]
+ obj.parents = d['parents']
+ obj.nodestoremove = d['nodestoremove']
+ obj.branchtorestore = d.get('branchtorestore', '')
+ obj.keep = d.get('keep') == cls._keep
obj.activebookmark = ''
- if activebook != cls._noactivebook:
- obj.activebookmark = activebook
- except error.RepoLookupError as err:
+ if d.get('activebook', '') != cls._noactivebook:
+ obj.activebookmark = d.get('activebook', '')
+ except (error.RepoLookupError, KeyError) as err:
raise error.CorruptedState(str(err))
return obj
@@ -216,19 +251,20 @@
@classmethod
def save(cls, repo, name, originalwctx, pendingctx, nodestoremove,
branchtorestore, keep=False, activebook=''):
- fp = repo.vfs(cls._filename, 'wb')
- fp.write('%i\n' % cls._version)
- fp.write('%s\n' % name)
- fp.write('%s\n' % nodemod.hex(originalwctx.node()))
- fp.write('%s\n' % nodemod.hex(pendingctx.node()))
- fp.write('%s\n' %
- ' '.join([nodemod.hex(p) for p in repo.dirstate.parents()]))
- fp.write('%s\n' %
- ' '.join([nodemod.hex(n) for n in nodestoremove]))
- fp.write('%s\n' % branchtorestore)
- fp.write('%s\n' % (cls._keep if keep else cls._nokeep))
- fp.write('%s\n' % (activebook or cls._noactivebook))
- fp.close()
+ info = {
+ "name": name,
+ "originalwctx": nodemod.hex(originalwctx.node()),
+ "pendingctx": nodemod.hex(pendingctx.node()),
+ "parents": ' '.join([nodemod.hex(p)
+ for p in repo.dirstate.parents()]),
+ "nodestoremove": ' '.join([nodemod.hex(n)
+ for n in nodestoremove]),
+ "branchtorestore": branchtorestore,
+ "keep": cls._keep if keep else cls._nokeep,
+ "activebook": activebook or cls._noactivebook
+ }
+ scmutil.simplekeyvaluefile(repo.vfs, cls._filename)\
+ .write(info, firstline=str(cls._version))
@classmethod
def clear(cls, repo):
@@ -316,7 +352,7 @@
"""return all mutable ancestors for ctx (included)
Much faster than the revset ancestors(ctx) & draft()"""
- seen = set([nodemod.nullrev])
+ seen = {nodemod.nullrev}
visit = collections.deque()
visit.append(ctx)
while visit:
@@ -645,7 +681,7 @@
raise
shelvectx = repo['tip']
- if not shelvectx in state.pendingctx.children():
+ if state.pendingctx not in shelvectx.parents():
# rebase was a no-op, so it produced no child commit
shelvectx = state.pendingctx
else:
@@ -722,7 +758,7 @@
# refresh ctx after rebase completes
shelvectx = repo['tip']
- if not shelvectx in tmpwctx.children():
+ if tmpwctx not in shelvectx.parents():
# rebase was a no-op, so it produced no child commit
shelvectx = tmpwctx
return shelvectx
@@ -934,7 +970,7 @@
('i', 'interactive', None,
_('interactive mode, only works while creating a shelve')),
('', 'stat', None,
- _('output diffstat-style summary of changes'))] + commands.walkopts,
+ _('output diffstat-style summary of changes'))] + cmdutil.walkopts,
_('hg shelve [OPTION]... [FILE]...'))
def shelvecmd(ui, repo, *pats, **opts):
'''save and set aside changes from the working directory
@@ -970,17 +1006,17 @@
all shelved changes, use ``--cleanup``.
'''
allowables = [
- ('addremove', set(['create'])), # 'create' is pseudo action
- ('unknown', set(['create'])),
- ('cleanup', set(['cleanup'])),
-# ('date', set(['create'])), # ignored for passing '--date "0 0"' in tests
- ('delete', set(['delete'])),
- ('edit', set(['create'])),
- ('list', set(['list'])),
- ('message', set(['create'])),
- ('name', set(['create'])),
- ('patch', set(['patch', 'list'])),
- ('stat', set(['stat', 'list'])),
+ ('addremove', {'create'}), # 'create' is pseudo action
+ ('unknown', {'create'}),
+ ('cleanup', {'cleanup'}),
+# ('date', {'create'}), # ignored for passing '--date "0 0"' in tests
+ ('delete', {'delete'}),
+ ('edit', {'create'}),
+ ('list', {'list'}),
+ ('message', {'create'}),
+ ('name', {'create'}),
+ ('patch', {'patch', 'list'}),
+ ('stat', {'stat', 'list'}),
]
def checkopt(opt):
if opts.get(opt):
--- a/hgext/show.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/show.py Tue Jun 20 16:33:46 2017 -0400
@@ -34,7 +34,7 @@
testedwith = 'ships-with-hg-core'
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
revsetpredicate = registrar.revsetpredicate()
class showcmdfunc(registrar._funcregistrarbase):
@@ -58,7 +58,7 @@
showview = showcmdfunc()
@command('show', [
- # TODO: Switch this template flag to use commands.formatteropts if
+ # TODO: Switch this template flag to use cmdutil.formatteropts if
# 'hg show' becomes stable before --template/-T is stable. For now,
# we are putting it here without the '(EXPERIMENTAL)' flag because it
# is an important part of the 'hg show' user experience and the entire
@@ -185,7 +185,7 @@
# Add working directory parent.
wdirrev = repo['.'].rev()
if wdirrev != nullrev:
- relevant += revset.baseset(set([wdirrev]))
+ relevant += revset.baseset({wdirrev})
return subset & relevant
@@ -196,9 +196,8 @@
revs = repo.revs('sort(_underway(), topo)')
revdag = graphmod.dagwalker(repo, revs)
- displayer = cmdutil.changeset_templater(ui, repo, None, None,
- tmpl=fm._t.load(fm._topic),
- mapfile=None, buffered=True)
+ tmpl = fm._t.load(fm._topic)
+ displayer = cmdutil.makelogtemplater(ui, repo, tmpl, buffered=True)
ui.setconfig('experimental', 'graphshorten', True)
cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
--- a/hgext/strip.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/strip.py Tue Jun 20 16:33:46 2017 -0400
@@ -14,6 +14,8 @@
lock as lockmod,
merge,
node as nodemod,
+ pycompat,
+ registrar,
repair,
scmutil,
util,
@@ -22,7 +24,7 @@
release = lockmod.release
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -57,10 +59,7 @@
return s
def strip(ui, repo, revs, update=True, backup=True, force=None, bookmarks=None):
- wlock = lock = None
- try:
- wlock = repo.wlock()
- lock = repo.lock()
+ with repo.wlock(), repo.lock():
if update:
checklocalchanges(repo, force=force)
@@ -84,9 +83,6 @@
repomarks.recordchange(tr)
for bookmark in sorted(bookmarks):
ui.write(_("bookmark '%s' deleted\n") % bookmark)
- finally:
- release(lock, wlock)
-
@command("strip",
[
@@ -132,6 +128,7 @@
Return 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
backup = True
if opts.get('no_backup') or opts.get('nobackup'):
backup = False
@@ -159,18 +156,12 @@
rsrevs = repair.stripbmrevset(repo, marks[0])
revs.update(set(rsrevs))
if not revs:
- lock = tr = None
- try:
- lock = repo.lock()
- tr = repo.transaction('bookmark')
+ with repo.lock(), repo.transaction('bookmark') as tr:
for bookmark in bookmarks:
del repomarks[bookmark]
repomarks.recordchange(tr)
- tr.close()
- for bookmark in sorted(bookmarks):
- ui.write(_("bookmark '%s' deleted\n") % bookmark)
- finally:
- release(lock, tr)
+ for bookmark in sorted(bookmarks):
+ ui.write(_("bookmark '%s' deleted\n") % bookmark)
if not revs:
raise error.Abort(_('empty revision set'))
--- a/hgext/transplant.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/transplant.py Tue Jun 20 16:33:46 2017 -0400
@@ -42,7 +42,7 @@
pass
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
--- a/hgext/win32mbcs.py Tue Jun 13 22:24:41 2017 -0400
+++ b/hgext/win32mbcs.py Tue Jun 20 16:33:46 2017 -0400
@@ -183,7 +183,8 @@
if pycompat.osname == 'nt':
for f in winfuncs.split():
wrapname(f, wrapper)
- wrapname("mercurial.osutil.listdir", wrapperforlistdir)
+ wrapname("mercurial.util.listdir", wrapperforlistdir)
+ wrapname("mercurial.windows.listdir", wrapperforlistdir)
# wrap functions to be called with local byte string arguments
for f in rfuncs.split():
wrapname(f, reversewrapper)
--- a/i18n/polib.py Tue Jun 13 22:24:41 2017 -0400
+++ b/i18n/polib.py Tue Jun 20 16:33:46 2017 -0400
@@ -804,7 +804,7 @@
real_wrapwidth = wrapwidth - flength + specialchars_count
if wrapwidth > 0 and len(field) > real_wrapwidth:
# Wrap the line but take field name into account
- lines = [''] + [unescape(item) for item in wrap(
+ lines = [''] + [unescape(item) for item in textwrap.wrap(
escaped_field,
wrapwidth - 2, # 2 for quotes ""
drop_whitespace=False,
@@ -879,7 +879,7 @@
if val:
for comment in val.split('\n'):
if wrapwidth > 0 and len(comment) + len(c[1]) > wrapwidth:
- ret += wrap(
+ ret += textwrap.wrap(
comment,
wrapwidth,
initial_indent=c[1],
@@ -903,7 +903,7 @@
# what we want for filenames, so the dirty hack is to
# temporally replace hyphens with a char that a file cannot
# contain, like "*"
- ret += [l.replace('*', '-') for l in wrap(
+ ret += [l.replace('*', '-') for l in textwrap.wrap(
filestr.replace('-', '*'),
wrapwidth,
initial_indent='#: ',
@@ -1552,97 +1552,3 @@
return tup
# }}}
-# class TextWrapper {{{
-
-class TextWrapper(textwrap.TextWrapper):
- """
- Subclass of textwrap.TextWrapper that backport the
- drop_whitespace option.
- """
- def __init__(self, *args, **kwargs):
- drop_whitespace = kwargs.pop('drop_whitespace', True)
- textwrap.TextWrapper.__init__(self, *args, **kwargs)
- self.drop_whitespace = drop_whitespace
-
- def _wrap_chunks(self, chunks):
- """_wrap_chunks(chunks : [string]) -> [string]
-
- Wrap a sequence of text chunks and return a list of lines of
- length 'self.width' or less. (If 'break_long_words' is false,
- some lines may be longer than this.) Chunks correspond roughly
- to words and the whitespace between them: each chunk is
- indivisible (modulo 'break_long_words'), but a line break can
- come between any two chunks. Chunks should not have internal
- whitespace; ie. a chunk is either all whitespace or a "word".
- Whitespace chunks will be removed from the beginning and end of
- lines, but apart from that whitespace is preserved.
- """
- lines = []
- if self.width <= 0:
- raise ValueError("invalid width %r (must be > 0)" % self.width)
-
- # Arrange in reverse order so items can be efficiently popped
- # from a stack of chucks.
- chunks.reverse()
-
- while chunks:
-
- # Start the list of chunks that will make up the current line.
- # cur_len is just the length of all the chunks in cur_line.
- cur_line = []
- cur_len = 0
-
- # Figure out which static string will prefix this line.
- if lines:
- indent = self.subsequent_indent
- else:
- indent = self.initial_indent
-
- # Maximum width for this line.
- width = self.width - len(indent)
-
- # First chunk on line is whitespace -- drop it, unless this
- # is the very beginning of the text (ie. no lines started yet).
- if self.drop_whitespace and chunks[-1].strip() == '' and lines:
- del chunks[-1]
-
- while chunks:
- l = len(chunks[-1])
-
- # Can at least squeeze this chunk onto the current line.
- if cur_len + l <= width:
- cur_line.append(chunks.pop())
- cur_len += l
-
- # Nope, this line is full.
- else:
- break
-
- # The current line is full, and the next chunk is too big to
- # fit on *any* line (not just this one).
- if chunks and len(chunks[-1]) > width:
- self._handle_long_word(chunks, cur_line, cur_len, width)
-
- # If the last chunk on this line is all whitespace, drop it.
- if self.drop_whitespace and cur_line and cur_line[-1].strip() == '':
- del cur_line[-1]
-
- # Convert current line back to a string and store it in list
- # of all lines (return value).
- if cur_line:
- lines.append(indent + ''.join(cur_line))
-
- return lines
-
-# }}}
-# function wrap() {{{
-
-def wrap(text, width=70, **kwargs):
- """
- Wrap a single paragraph of text, returning a list of wrapped lines.
- """
- if sys.version_info < (2, 6):
- return TextWrapper(width=width, **kwargs).wrap(text)
- return textwrap.wrap(text, width=width, **kwargs)
-
-#}}}
--- a/mercurial/__init__.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/__init__.py Tue Jun 20 16:33:46 2017 -0400
@@ -7,126 +7,20 @@
from __future__ import absolute_import
-import imp
-import os
import sys
-import zipimport
-from . import (
- policy
-)
+# Allow 'from mercurial import demandimport' to keep working.
+import hgdemandimport
+demandimport = hgdemandimport
__all__ = []
-modulepolicy = policy.policy
-
-# Modules that have both Python and C implementations. See also the
-# set of .py files under mercurial/pure/.
-_dualmodules = set([
- 'mercurial.base85',
- 'mercurial.bdiff',
- 'mercurial.diffhelpers',
- 'mercurial.mpatch',
- 'mercurial.osutil',
- 'mercurial.parsers',
-])
-
-class hgimporter(object):
- """Object that conforms to import hook interface defined in PEP-302."""
- def find_module(self, name, path=None):
- # We only care about modules that have both C and pure implementations.
- if name in _dualmodules:
- return self
- return None
-
- def load_module(self, name):
- mod = sys.modules.get(name, None)
- if mod:
- return mod
-
- mercurial = sys.modules['mercurial']
-
- # The zip importer behaves sufficiently differently from the default
- # importer to warrant its own code path.
- loader = getattr(mercurial, '__loader__', None)
- if isinstance(loader, zipimport.zipimporter):
- def ziploader(*paths):
- """Obtain a zipimporter for a directory under the main zip."""
- path = os.path.join(loader.archive, *paths)
- zl = sys.path_importer_cache.get(path)
- if not zl:
- zl = zipimport.zipimporter(path)
- return zl
-
- try:
- if modulepolicy in policy.policynoc:
- raise ImportError()
-
- zl = ziploader('mercurial')
- mod = zl.load_module(name)
- # Unlike imp, ziploader doesn't expose module metadata that
- # indicates the type of module. So just assume what we found
- # is OK (even though it could be a pure Python module).
- except ImportError:
- if modulepolicy == b'c':
- raise
- zl = ziploader('mercurial', 'pure')
- mod = zl.load_module(name)
-
- sys.modules[name] = mod
- return mod
-
- # Unlike the default importer which searches special locations and
- # sys.path, we only look in the directory where "mercurial" was
- # imported from.
-
- # imp.find_module doesn't support submodules (modules with ".").
- # Instead you have to pass the parent package's __path__ attribute
- # as the path argument.
- stem = name.split('.')[-1]
-
- try:
- if modulepolicy in policy.policynoc:
- raise ImportError()
-
- modinfo = imp.find_module(stem, mercurial.__path__)
-
- # The Mercurial installer used to copy files from
- # mercurial/pure/*.py to mercurial/*.py. Therefore, it's possible
- # for some installations to have .py files under mercurial/*.
- # Loading Python modules when we expected C versions could result
- # in a) poor performance b) loading a version from a previous
- # Mercurial version, potentially leading to incompatibility. Either
- # scenario is bad. So we verify that modules loaded from
- # mercurial/* are C extensions. If the current policy allows the
- # loading of .py modules, the module will be re-imported from
- # mercurial/pure/* below.
- if modinfo[2][2] != imp.C_EXTENSION:
- raise ImportError('.py version of %s found where C '
- 'version should exist' % name)
-
- except ImportError:
- if modulepolicy == b'c':
- raise
-
- # Could not load the C extension and pure Python is allowed. So
- # try to load them.
- from . import pure
- modinfo = imp.find_module(stem, pure.__path__)
- if not modinfo:
- raise ImportError('could not find mercurial module %s' %
- name)
-
- mod = imp.load_module(name, *modinfo)
- sys.modules[name] = mod
- return mod
-
# Python 3 uses a custom module loader that transforms source code between
# source file reading and compilation. This is done by registering a custom
# finder that changes the spec for Mercurial modules to use a custom loader.
if sys.version_info[0] >= 3:
- from . import pure
import importlib
+ import importlib.abc
import io
import token
import tokenize
@@ -140,14 +34,9 @@
# zstd is already dual-version clean, don't try and mangle it
if fullname.startswith('mercurial.zstd'):
return None
-
- # This assumes Python 3 doesn't support loading C modules.
- if fullname in _dualmodules:
- stem = fullname.split('.')[-1]
- fullname = 'mercurial.pure.%s' % stem
- target = pure
- assert len(path) == 1
- path = [os.path.join(path[0], 'pure')]
+ # pywatchman is already dual-version clean, don't try and mangle it
+ if fullname.startswith('hgext.fsmonitor.pywatchman'):
+ return None
# Try to find the module using other registered finders.
spec = None
@@ -165,12 +54,16 @@
if not spec:
return None
- if fullname.startswith('mercurial.pure.'):
- spec.name = spec.name.replace('.pure.', '.')
-
# TODO need to support loaders from alternate specs, like zip
# loaders.
- spec.loader = hgloader(spec.name, spec.origin)
+ loader = hgloader(spec.name, spec.origin)
+ # Can't use util.safehasattr here because that would require
+ # importing util, and we're in import code.
+ if hasattr(spec.loader, 'loader'): # hasattr-py3-only
+ # This is a nested loader (maybe a lazy loader?)
+ spec.loader.loader = loader
+ else:
+ spec.loader = loader
return spec
def replacetokens(tokens, fullname):
@@ -391,13 +284,10 @@
# implemented them because they are very ugly.
return super(hgloader, self).source_to_code(data, path)
-# We automagically register our custom importer as a side-effect of loading.
-# This is necessary to ensure that any entry points are able to import
-# mercurial.* modules without having to perform this registration themselves.
-if sys.version_info[0] >= 3:
- _importercls = hgpathentryfinder
-else:
- _importercls = hgimporter
-if not any(isinstance(x, _importercls) for x in sys.meta_path):
- # meta_path is used before any implicit finders and before sys.path.
- sys.meta_path.insert(0, _importercls())
+ # We automagically register our custom importer as a side-effect of
+ # loading. This is necessary to ensure that any entry points are able
+ # to import mercurial.* modules without having to perform this
+ # registration themselves.
+ if not any(isinstance(x, hgpathentryfinder) for x in sys.meta_path):
+ # meta_path is used before any implicit finders and before sys.path.
+ sys.meta_path.insert(0, hgpathentryfinder())
--- a/mercurial/ancestor.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/ancestor.py Tue Jun 20 16:33:46 2017 -0400
@@ -47,7 +47,7 @@
sv |= poison
if v in nodes:
# history is linear
- return set([v])
+ return {v}
if sv < poison:
for p in pfunc(v):
sp = seen[p]
@@ -151,7 +151,7 @@
def hasbases(self):
'''whether the common set has any non-trivial bases'''
- return self.bases and self.bases != set([nullrev])
+ return self.bases and self.bases != {nullrev}
def addbases(self, newbases):
'''grow the ancestor set by adding new bases'''
--- a/mercurial/base85.c Tue Jun 13 22:24:41 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,182 +0,0 @@
-/*
- base85 codec
-
- Copyright 2006 Brendan Cully <brendan@kublai.com>
-
- This software may be used and distributed according to the terms of
- the GNU General Public License, incorporated herein by reference.
-
- Largely based on git's implementation
-*/
-
-#define PY_SSIZE_T_CLEAN
-#include <Python.h>
-
-#include "util.h"
-
-static const char b85chars[] = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
- "abcdefghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~";
-static char b85dec[256];
-
-static void b85prep(void)
-{
- unsigned i;
-
- memset(b85dec, 0, sizeof(b85dec));
- for (i = 0; i < sizeof(b85chars); i++)
- b85dec[(int)(b85chars[i])] = i + 1;
-}
-
-static PyObject *b85encode(PyObject *self, PyObject *args)
-{
- const unsigned char *text;
- PyObject *out;
- char *dst;
- Py_ssize_t len, olen, i;
- unsigned int acc, val, ch;
- int pad = 0;
-
- if (!PyArg_ParseTuple(args, "s#|i", &text, &len, &pad))
- return NULL;
-
- if (pad)
- olen = ((len + 3) / 4 * 5) - 3;
- else {
- olen = len % 4;
- if (olen)
- olen++;
- olen += len / 4 * 5;
- }
- if (!(out = PyBytes_FromStringAndSize(NULL, olen + 3)))
- return NULL;
-
- dst = PyBytes_AsString(out);
-
- while (len) {
- acc = 0;
- for (i = 24; i >= 0; i -= 8) {
- ch = *text++;
- acc |= ch << i;
- if (--len == 0)
- break;
- }
- for (i = 4; i >= 0; i--) {
- val = acc % 85;
- acc /= 85;
- dst[i] = b85chars[val];
- }
- dst += 5;
- }
-
- if (!pad)
- _PyBytes_Resize(&out, olen);
-
- return out;
-}
-
-static PyObject *b85decode(PyObject *self, PyObject *args)
-{
- PyObject *out;
- const char *text;
- char *dst;
- Py_ssize_t len, i, j, olen, cap;
- int c;
- unsigned int acc;
-
- if (!PyArg_ParseTuple(args, "s#", &text, &len))
- return NULL;
-
- olen = len / 5 * 4;
- i = len % 5;
- if (i)
- olen += i - 1;
- if (!(out = PyBytes_FromStringAndSize(NULL, olen)))
- return NULL;
-
- dst = PyBytes_AsString(out);
-
- i = 0;
- while (i < len)
- {
- acc = 0;
- cap = len - i - 1;
- if (cap > 4)
- cap = 4;
- for (j = 0; j < cap; i++, j++)
- {
- c = b85dec[(int)*text++] - 1;
- if (c < 0)
- return PyErr_Format(
- PyExc_ValueError,
- "bad base85 character at position %d",
- (int)i);
- acc = acc * 85 + c;
- }
- if (i++ < len)
- {
- c = b85dec[(int)*text++] - 1;
- if (c < 0)
- return PyErr_Format(
- PyExc_ValueError,
- "bad base85 character at position %d",
- (int)i);
- /* overflow detection: 0xffffffff == "|NsC0",
- * "|NsC" == 0x03030303 */
- if (acc > 0x03030303 || (acc *= 85) > 0xffffffff - c)
- return PyErr_Format(
- PyExc_ValueError,
- "bad base85 sequence at position %d",
- (int)i);
- acc += c;
- }
-
- cap = olen < 4 ? olen : 4;
- olen -= cap;
- for (j = 0; j < 4 - cap; j++)
- acc *= 85;
- if (cap && cap < 4)
- acc += 0xffffff >> (cap - 1) * 8;
- for (j = 0; j < cap; j++)
- {
- acc = (acc << 8) | (acc >> 24);
- *dst++ = acc;
- }
- }
-
- return out;
-}
-
-static char base85_doc[] = "Base85 Data Encoding";
-
-static PyMethodDef methods[] = {
- {"b85encode", b85encode, METH_VARARGS,
- "Encode text in base85.\n\n"
- "If the second parameter is true, pad the result to a multiple of "
- "five characters.\n"},
- {"b85decode", b85decode, METH_VARARGS, "Decode base85 text.\n"},
- {NULL, NULL}
-};
-
-#ifdef IS_PY3K
-static struct PyModuleDef base85_module = {
- PyModuleDef_HEAD_INIT,
- "base85",
- base85_doc,
- -1,
- methods
-};
-
-PyMODINIT_FUNC PyInit_base85(void)
-{
- b85prep();
-
- return PyModule_Create(&base85_module);
-}
-#else
-PyMODINIT_FUNC initbase85(void)
-{
- Py_InitModule3("base85", methods, base85_doc);
-
- b85prep();
-}
-#endif
--- a/mercurial/bdiff_module.c Tue Jun 13 22:24:41 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,213 +0,0 @@
-/*
- bdiff.c - efficient binary diff extension for Mercurial
-
- Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
-
- This software may be used and distributed according to the terms of
- the GNU General Public License, incorporated herein by reference.
-
- Based roughly on Python difflib
-*/
-
-#define PY_SSIZE_T_CLEAN
-#include <Python.h>
-#include <stdlib.h>
-#include <string.h>
-#include <limits.h>
-
-#include "bdiff.h"
-#include "bitmanipulation.h"
-#include "util.h"
-
-
-static PyObject *blocks(PyObject *self, PyObject *args)
-{
- PyObject *sa, *sb, *rl = NULL, *m;
- struct bdiff_line *a, *b;
- struct bdiff_hunk l, *h;
- int an, bn, count, pos = 0;
-
- l.next = NULL;
-
- if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb))
- return NULL;
-
- an = bdiff_splitlines(PyBytes_AsString(sa), PyBytes_Size(sa), &a);
- bn = bdiff_splitlines(PyBytes_AsString(sb), PyBytes_Size(sb), &b);
-
- if (!a || !b)
- goto nomem;
-
- count = bdiff_diff(a, an, b, bn, &l);
- if (count < 0)
- goto nomem;
-
- rl = PyList_New(count);
- if (!rl)
- goto nomem;
-
- for (h = l.next; h; h = h->next) {
- m = Py_BuildValue("iiii", h->a1, h->a2, h->b1, h->b2);
- PyList_SetItem(rl, pos, m);
- pos++;
- }
-
-nomem:
- free(a);
- free(b);
- bdiff_freehunks(l.next);
- return rl ? rl : PyErr_NoMemory();
-}
-
-static PyObject *bdiff(PyObject *self, PyObject *args)
-{
- char *sa, *sb, *rb, *ia, *ib;
- PyObject *result = NULL;
- struct bdiff_line *al, *bl;
- struct bdiff_hunk l, *h;
- int an, bn, count;
- Py_ssize_t len = 0, la, lb, li = 0, lcommon = 0, lmax;
- PyThreadState *_save;
-
- l.next = NULL;
-
- if (!PyArg_ParseTuple(args, "s#s#:bdiff", &sa, &la, &sb, &lb))
- return NULL;
-
- if (la > UINT_MAX || lb > UINT_MAX) {
- PyErr_SetString(PyExc_ValueError, "bdiff inputs too large");
- return NULL;
- }
-
- _save = PyEval_SaveThread();
-
- lmax = la > lb ? lb : la;
- for (ia = sa, ib = sb;
- li < lmax && *ia == *ib;
- ++li, ++ia, ++ib)
- if (*ia == '\n')
- lcommon = li + 1;
- /* we can almost add: if (li == lmax) lcommon = li; */
-
- an = bdiff_splitlines(sa + lcommon, la - lcommon, &al);
- bn = bdiff_splitlines(sb + lcommon, lb - lcommon, &bl);
- if (!al || !bl)
- goto nomem;
-
- count = bdiff_diff(al, an, bl, bn, &l);
- if (count < 0)
- goto nomem;
-
- /* calculate length of output */
- la = lb = 0;
- for (h = l.next; h; h = h->next) {
- if (h->a1 != la || h->b1 != lb)
- len += 12 + bl[h->b1].l - bl[lb].l;
- la = h->a2;
- lb = h->b2;
- }
- PyEval_RestoreThread(_save);
- _save = NULL;
-
- result = PyBytes_FromStringAndSize(NULL, len);
-
- if (!result)
- goto nomem;
-
- /* build binary patch */
- rb = PyBytes_AsString(result);
- la = lb = 0;
-
- for (h = l.next; h; h = h->next) {
- if (h->a1 != la || h->b1 != lb) {
- len = bl[h->b1].l - bl[lb].l;
- putbe32((uint32_t)(al[la].l + lcommon - al->l), rb);
- putbe32((uint32_t)(al[h->a1].l + lcommon - al->l), rb + 4);
- putbe32((uint32_t)len, rb + 8);
- memcpy(rb + 12, bl[lb].l, len);
- rb += 12 + len;
- }
- la = h->a2;
- lb = h->b2;
- }
-
-nomem:
- if (_save)
- PyEval_RestoreThread(_save);
- free(al);
- free(bl);
- bdiff_freehunks(l.next);
- return result ? result : PyErr_NoMemory();
-}
-
-/*
- * If allws != 0, remove all whitespace (' ', \t and \r). Otherwise,
- * reduce whitespace sequences to a single space and trim remaining whitespace
- * from end of lines.
- */
-static PyObject *fixws(PyObject *self, PyObject *args)
-{
- PyObject *s, *result = NULL;
- char allws, c;
- const char *r;
- Py_ssize_t i, rlen, wlen = 0;
- char *w;
-
- if (!PyArg_ParseTuple(args, "Sb:fixws", &s, &allws))
- return NULL;
- r = PyBytes_AsString(s);
- rlen = PyBytes_Size(s);
-
- w = (char *)PyMem_Malloc(rlen ? rlen : 1);
- if (!w)
- goto nomem;
-
- for (i = 0; i != rlen; i++) {
- c = r[i];
- if (c == ' ' || c == '\t' || c == '\r') {
- if (!allws && (wlen == 0 || w[wlen - 1] != ' '))
- w[wlen++] = ' ';
- } else if (c == '\n' && !allws
- && wlen > 0 && w[wlen - 1] == ' ') {
- w[wlen - 1] = '\n';
- } else {
- w[wlen++] = c;
- }
- }
-
- result = PyBytes_FromStringAndSize(w, wlen);
-
-nomem:
- PyMem_Free(w);
- return result ? result : PyErr_NoMemory();
-}
-
-
-static char mdiff_doc[] = "Efficient binary diff.";
-
-static PyMethodDef methods[] = {
- {"bdiff", bdiff, METH_VARARGS, "calculate a binary diff\n"},
- {"blocks", blocks, METH_VARARGS, "find a list of matching lines\n"},
- {"fixws", fixws, METH_VARARGS, "normalize diff whitespaces\n"},
- {NULL, NULL}
-};
-
-#ifdef IS_PY3K
-static struct PyModuleDef bdiff_module = {
- PyModuleDef_HEAD_INIT,
- "bdiff",
- mdiff_doc,
- -1,
- methods
-};
-
-PyMODINIT_FUNC PyInit_bdiff(void)
-{
- return PyModule_Create(&bdiff_module);
-}
-#else
-PyMODINIT_FUNC initbdiff(void)
-{
- Py_InitModule3("bdiff", methods, mdiff_doc);
-}
-#endif
--- a/mercurial/bitmanipulation.h Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/bitmanipulation.h Tue Jun 20 16:33:46 2017 -0400
@@ -1,6 +1,8 @@
#ifndef _HG_BITMANIPULATION_H_
#define _HG_BITMANIPULATION_H_
+#include <string.h>
+
#include "compat.h"
static inline uint32_t getbe32(const char *c)
--- a/mercurial/bookmarks.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/bookmarks.py Tue Jun 20 16:33:46 2017 -0400
@@ -50,28 +50,35 @@
def __init__(self, repo):
dict.__init__(self)
self._repo = repo
+ self._clean = True
+ self._aclean = True
+ nm = repo.changelog.nodemap
+ tonode = bin # force local lookup
+ setitem = dict.__setitem__
try:
- bkfile = _getbkfile(repo)
- for line in bkfile:
- line = line.strip()
- if not line:
- continue
- if ' ' not in line:
- repo.ui.warn(_('malformed line in .hg/bookmarks: %r\n')
- % line)
- continue
- sha, refspec = line.split(' ', 1)
- refspec = encoding.tolocal(refspec)
- try:
- self[refspec] = repo.changelog.lookup(sha)
- except LookupError:
- pass
+ with _getbkfile(repo) as bkfile:
+ for line in bkfile:
+ line = line.strip()
+ if not line:
+ continue
+ try:
+ sha, refspec = line.split(' ', 1)
+ node = tonode(sha)
+ if node in nm:
+ refspec = encoding.tolocal(refspec)
+ setitem(self, refspec, node)
+ except (TypeError, ValueError):
+ # TypeError:
+ # - bin(...)
+ # ValueError:
+ # - node in nm, for non-20-bytes entry
+ # - split(...), for string without ' '
+ repo.ui.warn(_('malformed line in .hg/bookmarks: %r\n')
+ % line)
except IOError as inst:
if inst.errno != errno.ENOENT:
raise
- self._clean = True
self._active = _readactive(repo, self)
- self._aclean = True
@property
def active(self):
@@ -226,6 +233,28 @@
deleted = True
return deleted
+def headsforactive(repo):
+ """Given a repo with an active bookmark, return divergent bookmark nodes.
+
+ Args:
+ repo: A repository with an active bookmark.
+
+ Returns:
+ A list of binary node ids that is the full list of other
+ revisions with bookmarks divergent from the active bookmark. If
+ there were no divergent bookmarks, then this list will contain
+ only one entry.
+ """
+ if not repo._activebookmark:
+ raise ValueError(
+ 'headsforactive() only makes sense with an active bookmark')
+ name = repo._activebookmark.split('@', 1)[0]
+ heads = []
+ for mark, n in repo._bookmarks.iteritems():
+ if mark.split('@', 1)[0] == name:
+ heads.append(n)
+ return heads
+
def calculateupdate(ui, repo, checkout):
'''Return a tuple (targetrev, movemarkfrom) indicating the rev to
check out and where to move the active bookmark from, if needed.'''
--- a/mercurial/branchmap.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/branchmap.py Tue Jun 20 16:33:46 2017 -0400
@@ -67,8 +67,6 @@
partial.setdefault(label, []).append(node)
if state == 'c':
partial._closednodes.add(node)
- except KeyboardInterrupt:
- raise
except Exception as inst:
if repo.ui.debugflag:
msg = 'invalid branchheads cache'
@@ -408,8 +406,7 @@
# fast path: extract data from cache, use it if node is matching
reponode = changelog.node(rev)[:_rbcnodelen]
- cachenode, branchidx = unpack_from(
- _rbcrecfmt, util.buffer(self._rbcrevs), rbcrevidx)
+ cachenode, branchidx = unpack_from(_rbcrecfmt, self._rbcrevs, rbcrevidx)
close = bool(branchidx & _rbccloseflag)
if close:
branchidx &= _rbcbranchidxmask
--- a/mercurial/bundle2.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/bundle2.py Tue Jun 20 16:33:46 2017 -0400
@@ -1005,7 +1005,7 @@
# backup exception data for later
ui.debug('bundle2-input-stream-interrupt: encoding exception %s'
% exc)
- exc_info = sys.exc_info()
+ tb = sys.exc_info()[2]
msg = 'unexpected error: %s' % exc
interpart = bundlepart('error:abort', [('message', msg)],
mandatory=False)
@@ -1016,10 +1016,7 @@
outdebug(ui, 'closing payload chunk')
# abort current part payload
yield _pack(_fpayloadsize, 0)
- if pycompat.ispy3:
- raise exc_info[0](exc_info[1]).with_traceback(exc_info[2])
- else:
- exec("""raise exc_info[0], exc_info[1], exc_info[2]""")
+ pycompat.raisewithtb(exc, tb)
# end of payload
outdebug(ui, 'closing payload chunk')
yield _pack(_fpayloadsize, 0)
@@ -1326,6 +1323,9 @@
caps['obsmarkers'] = supportedformat
if allowpushback:
caps['pushback'] = ()
+ cpmode = repo.ui.config('server', 'concurrent-push-mode', 'strict')
+ if cpmode == 'check-related':
+ caps['checkheads'] = ('related',)
return caps
def bundle2caps(remote):
@@ -1342,6 +1342,91 @@
obscaps = caps.get('obsmarkers', ())
return [int(c[1:]) for c in obscaps if c.startswith('V')]
+def writenewbundle(ui, repo, source, filename, bundletype, outgoing, opts,
+ vfs=None, compression=None, compopts=None):
+ if bundletype.startswith('HG10'):
+ cg = changegroup.getchangegroup(repo, source, outgoing, version='01')
+ return writebundle(ui, cg, filename, bundletype, vfs=vfs,
+ compression=compression, compopts=compopts)
+ elif not bundletype.startswith('HG20'):
+ raise error.ProgrammingError('unknown bundle type: %s' % bundletype)
+
+ caps = {}
+ if 'obsolescence' in opts:
+ caps['obsmarkers'] = ('V1',)
+ bundle = bundle20(ui, caps)
+ bundle.setcompression(compression, compopts)
+ _addpartsfromopts(ui, repo, bundle, source, outgoing, opts)
+ chunkiter = bundle.getchunks()
+
+ return changegroup.writechunks(ui, chunkiter, filename, vfs=vfs)
+
+def _addpartsfromopts(ui, repo, bundler, source, outgoing, opts):
+ # We should eventually reconcile this logic with the one behind
+ # 'exchange.getbundle2partsgenerator'.
+ #
+ # The type of input from 'getbundle' and 'writenewbundle' are a bit
+ # different right now. So we keep them separated for now for the sake of
+ # simplicity.
+
+ # we always want a changegroup in such bundle
+ cgversion = opts.get('cg.version')
+ if cgversion is None:
+ cgversion = changegroup.safeversion(repo)
+ cg = changegroup.getchangegroup(repo, source, outgoing,
+ version=cgversion)
+ part = bundler.newpart('changegroup', data=cg.getchunks())
+ part.addparam('version', cg.version)
+ if 'clcount' in cg.extras:
+ part.addparam('nbchanges', str(cg.extras['clcount']),
+ mandatory=False)
+
+ addparttagsfnodescache(repo, bundler, outgoing)
+
+ if opts.get('obsolescence', False):
+ obsmarkers = repo.obsstore.relevantmarkers(outgoing.missing)
+ buildobsmarkerspart(bundler, obsmarkers)
+
+def addparttagsfnodescache(repo, bundler, outgoing):
+ # we include the tags fnode cache for the bundle changeset
+ # (as an optional part)
+ cache = tags.hgtagsfnodescache(repo.unfiltered())
+ chunks = []
+
+ # .hgtags fnodes are only relevant for head changesets. While we could
+ # transfer values for all known nodes, there will likely be little to
+ # no benefit.
+ #
+ # We don't bother using a generator to produce output data because
+ # a) we only have 40 bytes per head and even esoteric numbers of heads
+ # consume little memory (1M heads is 40MB) b) we don't want to send the
+ # part if we don't have entries and knowing if we have entries requires
+ # cache lookups.
+ for node in outgoing.missingheads:
+ # Don't compute missing, as this may slow down serving.
+ fnode = cache.getfnode(node, computemissing=False)
+ if fnode is not None:
+ chunks.extend([node, fnode])
+
+ if chunks:
+ bundler.newpart('hgtagsfnodes', data=''.join(chunks))
+
+def buildobsmarkerspart(bundler, markers):
+ """add an obsmarker part to the bundler with <markers>
+
+ No part is created if markers is empty.
+ Raises ValueError if the bundler doesn't support any known obsmarker format.
+ """
+ if not markers:
+ return None
+
+ remoteversions = obsmarkersversion(bundler.capabilities)
+ version = obsolete.commonversion(remoteversions)
+ if version is None:
+ raise ValueError('bundler does not support common obsmarker format')
+ stream = obsolete.encodemarkers(markers, True, version=version)
+ return bundler.newpart('obsmarkers', data=stream)
+
def writebundle(ui, cg, filename, bundletype, vfs=None, compression=None,
compopts=None):
"""Write a bundle file and return its filename.
@@ -1389,12 +1474,7 @@
This is a very early implementation that will massive rework before being
inflicted to any end-user.
"""
- # Make sure we trigger a transaction creation
- #
- # The addchangegroup function will get a transaction object by itself, but
- # we need to make sure we trigger the creation of a transaction object used
- # for the whole processing scope.
- op.gettransaction()
+ tr = op.gettransaction()
unpackerversion = inpart.params.get('version', '01')
# We should raise an appropriate exception here
cg = changegroup.getunbundler(unpackerversion, inpart, None)
@@ -1412,7 +1492,8 @@
op.repo.requirements.add('treemanifest')
op.repo._applyopenerreqs()
op.repo._writerequirements()
- ret = cg.apply(op.repo, 'bundle2', 'bundle2', expectedtotal=nbchangesets)
+ ret = cg.apply(op.repo, tr, 'bundle2', 'bundle2',
+ expectedtotal=nbchangesets)
op.records.add('changegroup', {'return': ret})
if op.reply is not None:
# This is definitely not the final form of this
@@ -1470,18 +1551,13 @@
real_part = util.digestchecker(url.open(op.ui, raw_url), size, digests)
- # Make sure we trigger a transaction creation
- #
- # The addchangegroup function will get a transaction object by itself, but
- # we need to make sure we trigger the creation of a transaction object used
- # for the whole processing scope.
- op.gettransaction()
+ tr = op.gettransaction()
from . import exchange
cg = exchange.readbundle(op.repo.ui, real_part, raw_url)
if not isinstance(cg, changegroup.cg1unpacker):
raise error.Abort(_('%s: not a bundle version 1.0') %
util.hidepassword(raw_url))
- ret = cg.apply(op.repo, 'bundle2', 'bundle2')
+ ret = cg.apply(op.repo, tr, 'bundle2', 'bundle2')
op.records.add('changegroup', {'return': ret})
if op.reply is not None:
# This is definitely not the final form of this
@@ -1521,6 +1597,35 @@
raise error.PushRaced('repository changed while pushing - '
'please try again')
+@parthandler('check:updated-heads')
+def handlecheckupdatedheads(op, inpart):
+ """check for race on the heads touched by a push
+
+ This is similar to 'check:heads' but focuses on the heads actually updated
+ during the push. If other activities happen on unrelated heads, it is
+ ignored.
+
+ This allows servers with high traffic to avoid push contention as long as
+ unrelated parts of the graph are involved."""
+ h = inpart.read(20)
+ heads = []
+ while len(h) == 20:
+ heads.append(h)
+ h = inpart.read(20)
+ assert not h
+ # trigger a transaction so that we are guaranteed to have the lock now.
+ if op.ui.configbool('experimental', 'bundle2lazylocking'):
+ op.gettransaction()
+
+ currentheads = set()
+ for ls in op.repo.branchmap().itervalues():
+ currentheads.update(ls)
+
+ for h in heads:
+ if h not in currentheads:
+ raise error.PushRaced('repository changed while pushing - '
+ 'please try again')
+
@parthandler('output')
def handleoutput(op, inpart):
"""forward output captured on the server to the client"""
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/base85.c Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,189 @@
+/*
+ base85 codec
+
+ Copyright 2006 Brendan Cully <brendan@kublai.com>
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+
+ Largely based on git's implementation
+*/
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+
+#include "util.h"
+
+static const char b85chars[] = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+ "abcdefghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~";
+static char b85dec[256];
+
+static void b85prep(void)
+{
+ unsigned i;
+
+ memset(b85dec, 0, sizeof(b85dec));
+ for (i = 0; i < sizeof(b85chars); i++)
+ b85dec[(int)(b85chars[i])] = i + 1;
+}
+
+static PyObject *b85encode(PyObject *self, PyObject *args)
+{
+ const unsigned char *text;
+ PyObject *out;
+ char *dst;
+ Py_ssize_t len, olen, i;
+ unsigned int acc, val, ch;
+ int pad = 0;
+
+ if (!PyArg_ParseTuple(args, "s#|i", &text, &len, &pad))
+ return NULL;
+
+ if (pad)
+ olen = ((len + 3) / 4 * 5) - 3;
+ else {
+ olen = len % 4;
+ if (olen)
+ olen++;
+ olen += len / 4 * 5;
+ }
+ if (!(out = PyBytes_FromStringAndSize(NULL, olen + 3)))
+ return NULL;
+
+ dst = PyBytes_AsString(out);
+
+ while (len) {
+ acc = 0;
+ for (i = 24; i >= 0; i -= 8) {
+ ch = *text++;
+ acc |= ch << i;
+ if (--len == 0)
+ break;
+ }
+ for (i = 4; i >= 0; i--) {
+ val = acc % 85;
+ acc /= 85;
+ dst[i] = b85chars[val];
+ }
+ dst += 5;
+ }
+
+ if (!pad)
+ _PyBytes_Resize(&out, olen);
+
+ return out;
+}
+
+static PyObject *b85decode(PyObject *self, PyObject *args)
+{
+ PyObject *out;
+ const char *text;
+ char *dst;
+ Py_ssize_t len, i, j, olen, cap;
+ int c;
+ unsigned int acc;
+
+ if (!PyArg_ParseTuple(args, "s#", &text, &len))
+ return NULL;
+
+ olen = len / 5 * 4;
+ i = len % 5;
+ if (i)
+ olen += i - 1;
+ if (!(out = PyBytes_FromStringAndSize(NULL, olen)))
+ return NULL;
+
+ dst = PyBytes_AsString(out);
+
+ i = 0;
+ while (i < len)
+ {
+ acc = 0;
+ cap = len - i - 1;
+ if (cap > 4)
+ cap = 4;
+ for (j = 0; j < cap; i++, j++)
+ {
+ c = b85dec[(int)*text++] - 1;
+ if (c < 0)
+ return PyErr_Format(
+ PyExc_ValueError,
+ "bad base85 character at position %d",
+ (int)i);
+ acc = acc * 85 + c;
+ }
+ if (i++ < len)
+ {
+ c = b85dec[(int)*text++] - 1;
+ if (c < 0)
+ return PyErr_Format(
+ PyExc_ValueError,
+ "bad base85 character at position %d",
+ (int)i);
+ /* overflow detection: 0xffffffff == "|NsC0",
+ * "|NsC" == 0x03030303 */
+ if (acc > 0x03030303 || (acc *= 85) > 0xffffffff - c)
+ return PyErr_Format(
+ PyExc_ValueError,
+ "bad base85 sequence at position %d",
+ (int)i);
+ acc += c;
+ }
+
+ cap = olen < 4 ? olen : 4;
+ olen -= cap;
+ for (j = 0; j < 4 - cap; j++)
+ acc *= 85;
+ if (cap && cap < 4)
+ acc += 0xffffff >> (cap - 1) * 8;
+ for (j = 0; j < cap; j++)
+ {
+ acc = (acc << 8) | (acc >> 24);
+ *dst++ = acc;
+ }
+ }
+
+ return out;
+}
+
+static char base85_doc[] = "Base85 Data Encoding";
+
+static PyMethodDef methods[] = {
+ {"b85encode", b85encode, METH_VARARGS,
+ "Encode text in base85.\n\n"
+ "If the second parameter is true, pad the result to a multiple of "
+ "five characters.\n"},
+ {"b85decode", b85decode, METH_VARARGS, "Decode base85 text.\n"},
+ {NULL, NULL}
+};
+
+static const int version = 1;
+
+#ifdef IS_PY3K
+static struct PyModuleDef base85_module = {
+ PyModuleDef_HEAD_INIT,
+ "base85",
+ base85_doc,
+ -1,
+ methods
+};
+
+PyMODINIT_FUNC PyInit_base85(void)
+{
+ PyObject *m;
+ b85prep();
+
+ m = PyModule_Create(&base85_module);
+ PyModule_AddIntConstant(m, "version", version);
+ return m;
+}
+#else
+PyMODINIT_FUNC initbase85(void)
+{
+ PyObject *m;
+ m = Py_InitModule3("base85", methods, base85_doc);
+
+ b85prep();
+ PyModule_AddIntConstant(m, "version", version);
+}
+#endif
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/bdiff.c Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,220 @@
+/*
+ bdiff.c - efficient binary diff extension for Mercurial
+
+ Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+
+ Based roughly on Python difflib
+*/
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include <stdlib.h>
+#include <string.h>
+#include <limits.h>
+
+#include "bdiff.h"
+#include "bitmanipulation.h"
+#include "util.h"
+
+
+static PyObject *blocks(PyObject *self, PyObject *args)
+{
+ PyObject *sa, *sb, *rl = NULL, *m;
+ struct bdiff_line *a, *b;
+ struct bdiff_hunk l, *h;
+ int an, bn, count, pos = 0;
+
+ l.next = NULL;
+
+ if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb))
+ return NULL;
+
+ an = bdiff_splitlines(PyBytes_AsString(sa), PyBytes_Size(sa), &a);
+ bn = bdiff_splitlines(PyBytes_AsString(sb), PyBytes_Size(sb), &b);
+
+ if (!a || !b)
+ goto nomem;
+
+ count = bdiff_diff(a, an, b, bn, &l);
+ if (count < 0)
+ goto nomem;
+
+ rl = PyList_New(count);
+ if (!rl)
+ goto nomem;
+
+ for (h = l.next; h; h = h->next) {
+ m = Py_BuildValue("iiii", h->a1, h->a2, h->b1, h->b2);
+ PyList_SetItem(rl, pos, m);
+ pos++;
+ }
+
+nomem:
+ free(a);
+ free(b);
+ bdiff_freehunks(l.next);
+ return rl ? rl : PyErr_NoMemory();
+}
+
+static PyObject *bdiff(PyObject *self, PyObject *args)
+{
+ char *sa, *sb, *rb, *ia, *ib;
+ PyObject *result = NULL;
+ struct bdiff_line *al, *bl;
+ struct bdiff_hunk l, *h;
+ int an, bn, count;
+ Py_ssize_t len = 0, la, lb, li = 0, lcommon = 0, lmax;
+ PyThreadState *_save;
+
+ l.next = NULL;
+
+ if (!PyArg_ParseTuple(args, "s#s#:bdiff", &sa, &la, &sb, &lb))
+ return NULL;
+
+ if (la > UINT_MAX || lb > UINT_MAX) {
+ PyErr_SetString(PyExc_ValueError, "bdiff inputs too large");
+ return NULL;
+ }
+
+ _save = PyEval_SaveThread();
+
+ lmax = la > lb ? lb : la;
+ for (ia = sa, ib = sb;
+ li < lmax && *ia == *ib;
+ ++li, ++ia, ++ib)
+ if (*ia == '\n')
+ lcommon = li + 1;
+ /* we can almost add: if (li == lmax) lcommon = li; */
+
+ an = bdiff_splitlines(sa + lcommon, la - lcommon, &al);
+ bn = bdiff_splitlines(sb + lcommon, lb - lcommon, &bl);
+ if (!al || !bl)
+ goto nomem;
+
+ count = bdiff_diff(al, an, bl, bn, &l);
+ if (count < 0)
+ goto nomem;
+
+ /* calculate length of output */
+ la = lb = 0;
+ for (h = l.next; h; h = h->next) {
+ if (h->a1 != la || h->b1 != lb)
+ len += 12 + bl[h->b1].l - bl[lb].l;
+ la = h->a2;
+ lb = h->b2;
+ }
+ PyEval_RestoreThread(_save);
+ _save = NULL;
+
+ result = PyBytes_FromStringAndSize(NULL, len);
+
+ if (!result)
+ goto nomem;
+
+ /* build binary patch */
+ rb = PyBytes_AsString(result);
+ la = lb = 0;
+
+ for (h = l.next; h; h = h->next) {
+ if (h->a1 != la || h->b1 != lb) {
+ len = bl[h->b1].l - bl[lb].l;
+ putbe32((uint32_t)(al[la].l + lcommon - al->l), rb);
+ putbe32((uint32_t)(al[h->a1].l + lcommon - al->l), rb + 4);
+ putbe32((uint32_t)len, rb + 8);
+ memcpy(rb + 12, bl[lb].l, len);
+ rb += 12 + len;
+ }
+ la = h->a2;
+ lb = h->b2;
+ }
+
+nomem:
+ if (_save)
+ PyEval_RestoreThread(_save);
+ free(al);
+ free(bl);
+ bdiff_freehunks(l.next);
+ return result ? result : PyErr_NoMemory();
+}
+
+/*
+ * If allws != 0, remove all whitespace (' ', \t and \r). Otherwise,
+ * reduce whitespace sequences to a single space and trim remaining whitespace
+ * from end of lines.
+ */
+static PyObject *fixws(PyObject *self, PyObject *args)
+{
+ PyObject *s, *result = NULL;
+ char allws, c;
+ const char *r;
+ Py_ssize_t i, rlen, wlen = 0;
+ char *w;
+
+ if (!PyArg_ParseTuple(args, "Sb:fixws", &s, &allws))
+ return NULL;
+ r = PyBytes_AsString(s);
+ rlen = PyBytes_Size(s);
+
+ w = (char *)PyMem_Malloc(rlen ? rlen : 1);
+ if (!w)
+ goto nomem;
+
+ for (i = 0; i != rlen; i++) {
+ c = r[i];
+ if (c == ' ' || c == '\t' || c == '\r') {
+ if (!allws && (wlen == 0 || w[wlen - 1] != ' '))
+ w[wlen++] = ' ';
+ } else if (c == '\n' && !allws
+ && wlen > 0 && w[wlen - 1] == ' ') {
+ w[wlen - 1] = '\n';
+ } else {
+ w[wlen++] = c;
+ }
+ }
+
+ result = PyBytes_FromStringAndSize(w, wlen);
+
+nomem:
+ PyMem_Free(w);
+ return result ? result : PyErr_NoMemory();
+}
+
+
+static char mdiff_doc[] = "Efficient binary diff.";
+
+static PyMethodDef methods[] = {
+ {"bdiff", bdiff, METH_VARARGS, "calculate a binary diff\n"},
+ {"blocks", blocks, METH_VARARGS, "find a list of matching lines\n"},
+ {"fixws", fixws, METH_VARARGS, "normalize diff whitespaces\n"},
+ {NULL, NULL}
+};
+
+static const int version = 1;
+
+#ifdef IS_PY3K
+static struct PyModuleDef bdiff_module = {
+ PyModuleDef_HEAD_INIT,
+ "bdiff",
+ mdiff_doc,
+ -1,
+ methods
+};
+
+PyMODINIT_FUNC PyInit_bdiff(void)
+{
+ PyObject *m;
+ m = PyModule_Create(&bdiff_module);
+ PyModule_AddIntConstant(m, "version", version);
+ return m;
+}
+#else
+PyMODINIT_FUNC initbdiff(void)
+{
+ PyObject *m;
+ m = Py_InitModule3("bdiff", methods, mdiff_doc);
+ PyModule_AddIntConstant(m, "version", version);
+}
+#endif
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/diffhelpers.c Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,204 @@
+/*
+ * diffhelpers.c - helper routines for mpatch
+ *
+ * Copyright 2007 Chris Mason <chris.mason@oracle.com>
+ *
+ * This software may be used and distributed according to the terms
+ * of the GNU General Public License v2, incorporated herein by reference.
+ */
+
+#include <Python.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "util.h"
+
+static char diffhelpers_doc[] = "Efficient diff parsing";
+static PyObject *diffhelpers_Error;
+
+
+/* fixup the last lines of a and b when the patch has no newline at eof */
+static void _fix_newline(PyObject *hunk, PyObject *a, PyObject *b)
+{
+ Py_ssize_t hunksz = PyList_Size(hunk);
+ PyObject *s = PyList_GET_ITEM(hunk, hunksz-1);
+ char *l = PyBytes_AsString(s);
+ Py_ssize_t alen = PyList_Size(a);
+ Py_ssize_t blen = PyList_Size(b);
+ char c = l[0];
+ PyObject *hline;
+ Py_ssize_t sz = PyBytes_GET_SIZE(s);
+
+ if (sz > 1 && l[sz-2] == '\r')
+ /* tolerate CRLF in last line */
+ sz -= 1;
+
+ hline = PyBytes_FromStringAndSize(l, sz-1);
+ if (!hline) {
+ return;
+ }
+
+ if (c == ' ' || c == '+') {
+ PyObject *rline = PyBytes_FromStringAndSize(l + 1, sz - 2);
+ PyList_SetItem(b, blen-1, rline);
+ }
+ if (c == ' ' || c == '-') {
+ Py_INCREF(hline);
+ PyList_SetItem(a, alen-1, hline);
+ }
+ PyList_SetItem(hunk, hunksz-1, hline);
+}
+
+/* python callable form of _fix_newline */
+static PyObject *
+fix_newline(PyObject *self, PyObject *args)
+{
+ PyObject *hunk, *a, *b;
+ if (!PyArg_ParseTuple(args, "OOO", &hunk, &a, &b))
+ return NULL;
+ _fix_newline(hunk, a, b);
+ return Py_BuildValue("l", 0);
+}
+
+#if (PY_VERSION_HEX < 0x02050000)
+static const char *addlines_format = "OOiiOO";
+#else
+static const char *addlines_format = "OOnnOO";
+#endif
+
+/*
+ * read lines from fp into the hunk. The hunk is parsed into two arrays
+ * a and b. a gets the old state of the text, b gets the new state
+ * The control char from the hunk is saved when inserting into a, but not b
+ * (for performance while deleting files)
+ */
+static PyObject *
+addlines(PyObject *self, PyObject *args)
+{
+
+ PyObject *fp, *hunk, *a, *b, *x;
+ Py_ssize_t i;
+ Py_ssize_t lena, lenb;
+ Py_ssize_t num;
+ Py_ssize_t todoa, todob;
+ char *s, c;
+ PyObject *l;
+ if (!PyArg_ParseTuple(args, addlines_format,
+ &fp, &hunk, &lena, &lenb, &a, &b))
+ return NULL;
+
+ while (1) {
+ todoa = lena - PyList_Size(a);
+ todob = lenb - PyList_Size(b);
+ num = todoa > todob ? todoa : todob;
+ if (num == 0)
+ break;
+ for (i = 0; i < num; i++) {
+ x = PyFile_GetLine(fp, 0);
+ s = PyBytes_AsString(x);
+ c = *s;
+ if (strcmp(s, "\\ No newline at end of file\n") == 0) {
+ _fix_newline(hunk, a, b);
+ continue;
+ }
+ if (c == '\n') {
+ /* Some patches may be missing the control char
+ * on empty lines. Supply a leading space. */
+ Py_DECREF(x);
+ x = PyBytes_FromString(" \n");
+ }
+ PyList_Append(hunk, x);
+ if (c == '+') {
+ l = PyBytes_FromString(s + 1);
+ PyList_Append(b, l);
+ Py_DECREF(l);
+ } else if (c == '-') {
+ PyList_Append(a, x);
+ } else {
+ l = PyBytes_FromString(s + 1);
+ PyList_Append(b, l);
+ Py_DECREF(l);
+ PyList_Append(a, x);
+ }
+ Py_DECREF(x);
+ }
+ }
+ return Py_BuildValue("l", 0);
+}
+
+/*
+ * compare the lines in a with the lines in b. a is assumed to have
+ * a control char at the start of each line, this char is ignored in the
+ * compare
+ */
+static PyObject *
+testhunk(PyObject *self, PyObject *args)
+{
+
+ PyObject *a, *b;
+ long bstart;
+ Py_ssize_t alen, blen;
+ Py_ssize_t i;
+ char *sa, *sb;
+
+ if (!PyArg_ParseTuple(args, "OOl", &a, &b, &bstart))
+ return NULL;
+ alen = PyList_Size(a);
+ blen = PyList_Size(b);
+ if (alen > blen - bstart || bstart < 0) {
+ return Py_BuildValue("l", -1);
+ }
+ for (i = 0; i < alen; i++) {
+ sa = PyBytes_AsString(PyList_GET_ITEM(a, i));
+ sb = PyBytes_AsString(PyList_GET_ITEM(b, i + bstart));
+ if (strcmp(sa + 1, sb) != 0)
+ return Py_BuildValue("l", -1);
+ }
+ return Py_BuildValue("l", 0);
+}
+
+static PyMethodDef methods[] = {
+ {"addlines", addlines, METH_VARARGS, "add lines to a hunk\n"},
+ {"fix_newline", fix_newline, METH_VARARGS, "fixup newline counters\n"},
+ {"testhunk", testhunk, METH_VARARGS, "test lines in a hunk\n"},
+ {NULL, NULL}
+};
+
+static const int version = 1;
+
+#ifdef IS_PY3K
+static struct PyModuleDef diffhelpers_module = {
+ PyModuleDef_HEAD_INIT,
+ "diffhelpers",
+ diffhelpers_doc,
+ -1,
+ methods
+};
+
+PyMODINIT_FUNC PyInit_diffhelpers(void)
+{
+ PyObject *m;
+
+ m = PyModule_Create(&diffhelpers_module);
+ if (m == NULL)
+ return NULL;
+
+ diffhelpers_Error = PyErr_NewException("diffhelpers.diffhelpersError",
+ NULL, NULL);
+ Py_INCREF(diffhelpers_Error);
+ PyModule_AddObject(m, "diffhelpersError", diffhelpers_Error);
+ PyModule_AddIntConstant(m, "version", version);
+
+ return m;
+}
+#else
+PyMODINIT_FUNC
+initdiffhelpers(void)
+{
+ PyObject *m;
+ m = Py_InitModule3("diffhelpers", methods, diffhelpers_doc);
+ diffhelpers_Error = PyErr_NewException("diffhelpers.diffhelpersError",
+ NULL, NULL);
+ PyModule_AddIntConstant(m, "version", version);
+}
+#endif
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/dirs.c Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,315 @@
+/*
+ dirs.c - dynamic directory diddling for dirstates
+
+ Copyright 2013 Facebook
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+*/
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include "util.h"
+
+#ifdef IS_PY3K
+#define PYLONG_VALUE(o) ((PyLongObject *)o)->ob_digit[1]
+#else
+#define PYLONG_VALUE(o) PyInt_AS_LONG(o)
+#endif
+
+/*
+ * This is a multiset of directory names, built from the files that
+ * appear in a dirstate or manifest.
+ *
+ * A few implementation notes:
+ *
+ * We modify Python integers for refcounting, but those integers are
+ * never visible to Python code.
+ *
+ * We mutate strings in-place, but leave them immutable once they can
+ * be seen by Python code.
+ */
+typedef struct {
+ PyObject_HEAD
+ PyObject *dict;
+} dirsObject;
+
+static inline Py_ssize_t _finddir(const char *path, Py_ssize_t pos)
+{
+ while (pos != -1) {
+ if (path[pos] == '/')
+ break;
+ pos -= 1;
+ }
+
+ return pos;
+}
+
+static int _addpath(PyObject *dirs, PyObject *path)
+{
+ const char *cpath = PyBytes_AS_STRING(path);
+ Py_ssize_t pos = PyBytes_GET_SIZE(path);
+ PyObject *key = NULL;
+ int ret = -1;
+
+ /* This loop is super critical for performance. That's why we inline
+ * access to Python structs instead of going through a supported API.
+ * The implementation, therefore, is heavily dependent on CPython
+ * implementation details. We also commit violations of the Python
+ * "protocol" such as mutating immutable objects. But since we only
+ * mutate objects created in this function or in other well-defined
+ * locations, the references are known so these violations should go
+ * unnoticed. The code for adjusting the length of a PyBytesObject is
+ * essentially a minimal version of _PyBytes_Resize. */
+ while ((pos = _finddir(cpath, pos - 1)) != -1) {
+ PyObject *val;
+
+ /* It's likely that every prefix already has an entry
+ in our dict. Try to avoid allocating and
+ deallocating a string for each prefix we check. */
+ if (key != NULL)
+ ((PyBytesObject *)key)->ob_shash = -1;
+ else {
+ /* Force Python to not reuse a small shared string. */
+ key = PyBytes_FromStringAndSize(cpath,
+ pos < 2 ? 2 : pos);
+ if (key == NULL)
+ goto bail;
+ }
+ /* Py_SIZE(o) refers to the ob_size member of the struct. Yes,
+ * assigning to what looks like a function seems wrong. */
+ Py_SIZE(key) = pos;
+ ((PyBytesObject *)key)->ob_sval[pos] = '\0';
+
+ val = PyDict_GetItem(dirs, key);
+ if (val != NULL) {
+ PYLONG_VALUE(val) += 1;
+ break;
+ }
+
+ /* Force Python to not reuse a small shared int. */
+#ifdef IS_PY3K
+ val = PyLong_FromLong(0x1eadbeef);
+#else
+ val = PyInt_FromLong(0x1eadbeef);
+#endif
+
+ if (val == NULL)
+ goto bail;
+
+ PYLONG_VALUE(val) = 1;
+ ret = PyDict_SetItem(dirs, key, val);
+ Py_DECREF(val);
+ if (ret == -1)
+ goto bail;
+ Py_CLEAR(key);
+ }
+ ret = 0;
+
+bail:
+ Py_XDECREF(key);
+
+ return ret;
+}
+
+static int _delpath(PyObject *dirs, PyObject *path)
+{
+ char *cpath = PyBytes_AS_STRING(path);
+ Py_ssize_t pos = PyBytes_GET_SIZE(path);
+ PyObject *key = NULL;
+ int ret = -1;
+
+ while ((pos = _finddir(cpath, pos - 1)) != -1) {
+ PyObject *val;
+
+ key = PyBytes_FromStringAndSize(cpath, pos);
+
+ if (key == NULL)
+ goto bail;
+
+ val = PyDict_GetItem(dirs, key);
+ if (val == NULL) {
+ PyErr_SetString(PyExc_ValueError,
+ "expected a value, found none");
+ goto bail;
+ }
+
+ if (--PYLONG_VALUE(val) <= 0) {
+ if (PyDict_DelItem(dirs, key) == -1)
+ goto bail;
+ } else
+ break;
+ Py_CLEAR(key);
+ }
+ ret = 0;
+
+bail:
+ Py_XDECREF(key);
+
+ return ret;
+}
+
+static int dirs_fromdict(PyObject *dirs, PyObject *source, char skipchar)
+{
+ PyObject *key, *value;
+ Py_ssize_t pos = 0;
+
+ while (PyDict_Next(source, &pos, &key, &value)) {
+ if (!PyBytes_Check(key)) {
+ PyErr_SetString(PyExc_TypeError, "expected string key");
+ return -1;
+ }
+ if (skipchar) {
+ if (!dirstate_tuple_check(value)) {
+ PyErr_SetString(PyExc_TypeError,
+ "expected a dirstate tuple");
+ return -1;
+ }
+ if (((dirstateTupleObject *)value)->state == skipchar)
+ continue;
+ }
+
+ if (_addpath(dirs, key) == -1)
+ return -1;
+ }
+
+ return 0;
+}
+
+static int dirs_fromiter(PyObject *dirs, PyObject *source)
+{
+ PyObject *iter, *item = NULL;
+ int ret;
+
+ iter = PyObject_GetIter(source);
+ if (iter == NULL)
+ return -1;
+
+ while ((item = PyIter_Next(iter)) != NULL) {
+ if (!PyBytes_Check(item)) {
+ PyErr_SetString(PyExc_TypeError, "expected string");
+ break;
+ }
+
+ if (_addpath(dirs, item) == -1)
+ break;
+ Py_CLEAR(item);
+ }
+
+ ret = PyErr_Occurred() ? -1 : 0;
+ Py_DECREF(iter);
+ Py_XDECREF(item);
+ return ret;
+}
+
+/*
+ * Calculate a refcounted set of directory names for the files in a
+ * dirstate.
+ */
+static int dirs_init(dirsObject *self, PyObject *args)
+{
+ PyObject *dirs = NULL, *source = NULL;
+ char skipchar = 0;
+ int ret = -1;
+
+ self->dict = NULL;
+
+ if (!PyArg_ParseTuple(args, "|Oc:__init__", &source, &skipchar))
+ return -1;
+
+ dirs = PyDict_New();
+
+ if (dirs == NULL)
+ return -1;
+
+ if (source == NULL)
+ ret = 0;
+ else if (PyDict_Check(source))
+ ret = dirs_fromdict(dirs, source, skipchar);
+ else if (skipchar)
+ PyErr_SetString(PyExc_ValueError,
+ "skip character is only supported "
+ "with a dict source");
+ else
+ ret = dirs_fromiter(dirs, source);
+
+ if (ret == -1)
+ Py_XDECREF(dirs);
+ else
+ self->dict = dirs;
+
+ return ret;
+}
+
+PyObject *dirs_addpath(dirsObject *self, PyObject *args)
+{
+ PyObject *path;
+
+ if (!PyArg_ParseTuple(args, "O!:addpath", &PyBytes_Type, &path))
+ return NULL;
+
+ if (_addpath(self->dict, path) == -1)
+ return NULL;
+
+ Py_RETURN_NONE;
+}
+
+static PyObject *dirs_delpath(dirsObject *self, PyObject *args)
+{
+ PyObject *path;
+
+ if (!PyArg_ParseTuple(args, "O!:delpath", &PyBytes_Type, &path))
+ return NULL;
+
+ if (_delpath(self->dict, path) == -1)
+ return NULL;
+
+ Py_RETURN_NONE;
+}
+
+static int dirs_contains(dirsObject *self, PyObject *value)
+{
+ return PyBytes_Check(value) ? PyDict_Contains(self->dict, value) : 0;
+}
+
+static void dirs_dealloc(dirsObject *self)
+{
+ Py_XDECREF(self->dict);
+ PyObject_Del(self);
+}
+
+static PyObject *dirs_iter(dirsObject *self)
+{
+ return PyObject_GetIter(self->dict);
+}
+
+static PySequenceMethods dirs_sequence_methods;
+
+static PyMethodDef dirs_methods[] = {
+ {"addpath", (PyCFunction)dirs_addpath, METH_VARARGS, "add a path"},
+ {"delpath", (PyCFunction)dirs_delpath, METH_VARARGS, "remove a path"},
+ {NULL} /* Sentinel */
+};
+
+static PyTypeObject dirsType = { PyVarObject_HEAD_INIT(NULL, 0) };
+
+void dirs_module_init(PyObject *mod)
+{
+ dirs_sequence_methods.sq_contains = (objobjproc)dirs_contains;
+ dirsType.tp_name = "parsers.dirs";
+ dirsType.tp_new = PyType_GenericNew;
+ dirsType.tp_basicsize = sizeof(dirsObject);
+ dirsType.tp_dealloc = (destructor)dirs_dealloc;
+ dirsType.tp_as_sequence = &dirs_sequence_methods;
+ dirsType.tp_flags = Py_TPFLAGS_DEFAULT;
+ dirsType.tp_doc = "dirs";
+ dirsType.tp_iter = (getiterfunc)dirs_iter;
+ dirsType.tp_methods = dirs_methods;
+ dirsType.tp_init = (initproc)dirs_init;
+
+ if (PyType_Ready(&dirsType) < 0)
+ return;
+ Py_INCREF(&dirsType);
+
+ PyModule_AddObject(mod, "dirs", (PyObject *)&dirsType);
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/manifest.c Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,939 @@
+/*
+ * manifest.c - manifest type that does on-demand parsing.
+ *
+ * Copyright 2015, Google Inc.
+ *
+ * This software may be used and distributed according to the terms of
+ * the GNU General Public License, incorporated herein by reference.
+ */
+#include <Python.h>
+
+#include <assert.h>
+#include <string.h>
+#include <stdlib.h>
+
+#include "util.h"
+
+#define DEFAULT_LINES 100000
+
+typedef struct {
+	char *start; /* first byte of the line: "path\0" + 40 hex chars + flags + "\n" */
+	Py_ssize_t len; /* length of line including terminal newline */
+	char hash_suffix; /* optional 21st node byte, '\0' when absent */
+	bool from_malloc; /* true when `start` owns a heap buffer we must free */
+	bool deleted; /* lazily deleted; skipped by iteration, reclaimed by compact() */
+} line;
+
+typedef struct {
+	PyObject_HEAD
+	PyObject *pydata; /* bytes object backing most line->start pointers; kept alive for their lifetime */
+	line *lines; /* malloc'ed index of parsed lines */
+	int numlines; /* number of line entries */
+	int livelines; /* number of non-deleted lines */
+	int maxlines; /* allocated number of lines */
+	bool dirty; /* true when edits exist that compact() has not folded back into pydata */
+} lazymanifest;
+
+#define MANIFEST_OOM -1
+#define MANIFEST_NOT_SORTED -2
+#define MANIFEST_MALFORMED -3
+
+/* defined in parsers.c */
+PyObject *unhexlify(const char *str, int len);
+
+/* get the length of the path for a line */
+/* get the length of the path for a line */
+static size_t pathlen(line *l) {
+	return strlen(l->start); /* works because the path is NUL-terminated within the line */
+}
+
+/* get the node value of a single line */
+/* get the node value of a single line */
+static PyObject *nodeof(line *l) {
+	char *s = l->start;
+	ssize_t llen = pathlen(l);
+	PyObject *hash = unhexlify(s + llen + 1, 40); /* 40 hex digits follow the path's NUL */
+	if (!hash) {
+		return NULL;
+	}
+	if (l->hash_suffix != '\0') { /* re-append the preserved 21st byte (see setitem) */
+		char newhash[21];
+		memcpy(newhash, PyBytes_AsString(hash), 20);
+		Py_DECREF(hash);
+		newhash[20] = l->hash_suffix;
+		hash = PyBytes_FromStringAndSize(newhash, 21); /* may be NULL; caller handles */
+	}
+	return hash;
+}
+
+/* get the node hash and flags of a line as a tuple */
+/* get the node hash and flags of a line as a tuple */
+static PyObject *hashflags(line *l)
+{
+	char *s = l->start;
+	size_t plen = pathlen(l);
+	PyObject *hash = nodeof(l);
+
+	/* 40 for hash, 1 for null byte, 1 for newline */
+	size_t hplen = plen + 42;
+	Py_ssize_t flen = l->len - hplen; /* flags length; 0 when the line has no flag char */
+	PyObject *flags;
+	PyObject *tup;
+
+	if (!hash)
+		return NULL;
+	flags = PyBytes_FromStringAndSize(s + hplen - 1, flen); /* flags start right after the 40 hex digits */
+	if (!flags) {
+		Py_DECREF(hash);
+		return NULL;
+	}
+	tup = PyTuple_Pack(2, hash, flags);
+	Py_DECREF(flags); /* PyTuple_Pack took its own references */
+	Py_DECREF(hash);
+	return tup;
+}
+
+/* if we're about to run out of space in the line index, add more */
+static bool realloc_if_full(lazymanifest *self)
+{
+	if (self->numlines == self->maxlines) { /* index full: grow geometrically */
+		line *nl = realloc(self->lines, self->maxlines * 2 * sizeof(line));
+		if (nl) { self->lines = nl; self->maxlines *= 2; } /* on failure keep the old buffer valid instead of leaking it via self->lines = realloc(...) */
+	}
+	return self->numlines < self->maxlines; /* false only when the grow failed */
+}
+
+/*
+ * Find the line boundaries in the manifest that 'data' points to and store
+ * information about each line in 'self'.
+ */
+/*
+ * Find the line boundaries in the manifest that 'data' points to and store
+ * information about each line in 'self'.
+ */
+static int find_lines(lazymanifest *self, char *data, Py_ssize_t len)
+{
+	char *prev = NULL;
+	while (len > 0) {
+		line *l;
+		char *next = memchr(data, '\n', len);
+		if (!next) {
+			return MANIFEST_MALFORMED; /* text must end in a newline */
+		}
+		next++; /* advance past newline */
+		if (!realloc_if_full(self)) {
+			return MANIFEST_OOM; /* no memory */
+		}
+		if (prev && strcmp(prev, data) > -1) { /* >= 0: paths must be strictly increasing (no duplicates) */
+			/* This data isn't sorted, so we have to abort. */
+			return MANIFEST_NOT_SORTED;
+		}
+		l = self->lines + ((self->numlines)++);
+		l->start = data; /* points into self->pydata; not owned */
+		l->len = next - data;
+		l->hash_suffix = '\0';
+		l->from_malloc = false;
+		l->deleted = false;
+		len = len - l->len;
+		prev = data;
+		data = next;
+	}
+	self->livelines = self->numlines;
+	return 0;
+}
+
+static int lazymanifest_init(lazymanifest *self, PyObject *args) /* tp_init: parse manifest bytes into a line index */
+{
+	char *data;
+	Py_ssize_t len;
+	int err, ret;
+	PyObject *pydata;
+	if (!PyArg_ParseTuple(args, "S", &pydata)) { /* "S": exactly a bytes object */
+		return -1;
+	}
+	err = PyBytes_AsStringAndSize(pydata, &data, &len);
+
+	self->dirty = false;
+	if (err == -1)
+		return -1;
+	self->pydata = pydata;
+	Py_INCREF(self->pydata); /* line->start pointers reference this buffer; keep it alive */
+	Py_BEGIN_ALLOW_THREADS /* parsing touches no Python objects, so release the GIL */
+	self->lines = malloc(DEFAULT_LINES * sizeof(line));
+	self->maxlines = DEFAULT_LINES;
+	self->numlines = 0;
+	if (!self->lines)
+		ret = MANIFEST_OOM;
+	else
+		ret = find_lines(self, data, len);
+	Py_END_ALLOW_THREADS
+	switch (ret) {
+	case 0:
+		break;
+	case MANIFEST_OOM:
+		PyErr_NoMemory();
+		break;
+	case MANIFEST_NOT_SORTED:
+		PyErr_Format(PyExc_ValueError,
+			     "Manifest lines not in sorted order.");
+		break;
+	case MANIFEST_MALFORMED:
+		PyErr_Format(PyExc_ValueError,
+			     "Manifest did not end in a newline.");
+		break;
+	default:
+		PyErr_Format(PyExc_ValueError,
+			     "Unknown problem parsing manifest.");
+	}
+	return ret == 0 ? 0 : -1; /* on failure, partially-built state is reclaimed by dealloc */
+}
+
+static void lazymanifest_dealloc(lazymanifest *self) /* tp_dealloc */
+{
+	/* free any extra lines we had to allocate */
+	int i;
+	for (i = 0; i < self->numlines; i++) {
+		if (self->lines[i].from_malloc) { /* only setitem-created lines own their buffer */
+			free(self->lines[i].start);
+		}
+	}
+	if (self->lines) {
+		free(self->lines);
+		self->lines = NULL;
+	}
+	if (self->pydata) {
+		Py_DECREF(self->pydata);
+		self->pydata = NULL;
+	}
+	PyObject_Del(self);
+}
+
+/* iteration support */
+
+typedef struct {
+	PyObject_HEAD lazymanifest *m; /* private snapshot copy; owned reference */
+	Py_ssize_t pos; /* index of last line yielded; starts at -1 */
+} lmIter;
+
+static void lmiter_dealloc(PyObject *o) /* tp_dealloc shared by both iterator types */
+{
+	lmIter *self = (lmIter *)o;
+	Py_DECREF(self->m); /* release the snapshot taken at iterator creation */
+	PyObject_Del(self);
+}
+
+static line *lmiter_nextline(lmIter *self) /* advance to the next live line, or NULL at end */
+{
+	do {
+		self->pos++;
+		if (self->pos >= self->m->numlines) {
+			return NULL; /* exhausted */
+		}
+		/* skip over deleted manifest entries */
+	} while (self->m->lines[self->pos].deleted);
+	return self->m->lines + self->pos;
+}
+
+static PyObject *lmiter_iterentriesnext(PyObject *o) /* tp_iternext: yield (path, node, flags) */
+{
+	size_t pl;
+	line *l;
+	Py_ssize_t consumed;
+	PyObject *ret = NULL, *path = NULL, *hash = NULL, *flags = NULL;
+	l = lmiter_nextline((lmIter *)o);
+	if (!l) {
+		goto done; /* NULL with no error set signals StopIteration */
+	}
+	pl = pathlen(l);
+	path = PyBytes_FromStringAndSize(l->start, pl);
+	hash = nodeof(l);
+	consumed = pl + 41; /* path + NUL + 40 hex digits */
+	flags = PyBytes_FromStringAndSize(l->start + consumed,
+					  l->len - consumed - 1); /* -1 drops the newline */
+	if (!path || !hash || !flags) {
+		goto done;
+	}
+	ret = PyTuple_Pack(3, path, hash, flags);
+done:
+	Py_XDECREF(path); /* tuple holds its own references (or we are failing) */
+	Py_XDECREF(hash);
+	Py_XDECREF(flags);
+	return ret;
+}
+
+#ifdef IS_PY3K
+#define LAZYMANIFESTENTRIESITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT
+#else
+#define LAZYMANIFESTENTRIESITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT \
+ | Py_TPFLAGS_HAVE_ITER
+#endif
+
+static PyTypeObject lazymanifestEntriesIterator = { /* yields (path, node, flags) tuples over a snapshot copy */
+	PyVarObject_HEAD_INIT(NULL, 0)
+	"parsers.lazymanifest.entriesiterator", /*tp_name */
+	sizeof(lmIter),                  /*tp_basicsize */
+	0,                               /*tp_itemsize */
+	lmiter_dealloc,                  /*tp_dealloc */
+	0,                               /*tp_print */
+	0,                               /*tp_getattr */
+	0,                               /*tp_setattr */
+	0,                               /*tp_compare */
+	0,                               /*tp_repr */
+	0,                               /*tp_as_number */
+	0,                               /*tp_as_sequence */
+	0,                               /*tp_as_mapping */
+	0,                               /*tp_hash */
+	0,                               /*tp_call */
+	0,                               /*tp_str */
+	0,                               /*tp_getattro */
+	0,                               /*tp_setattro */
+	0,                               /*tp_as_buffer */
+	LAZYMANIFESTENTRIESITERATOR_TPFLAGS, /* tp_flags */
+	"Iterator for 3-tuples in a lazymanifest.", /* tp_doc */
+	0,                               /* tp_traverse */
+	0,                               /* tp_clear */
+	0,                               /* tp_richcompare */
+	0,                               /* tp_weaklistoffset */
+	PyObject_SelfIter,               /* tp_iter: __iter__() method */
+	lmiter_iterentriesnext,          /* tp_iternext: next() method */
+};
+
+static PyObject *lmiter_iterkeysnext(PyObject *o) /* tp_iternext: yield path bytes only */
+{
+	size_t pl;
+	line *l = lmiter_nextline((lmIter *)o);
+	if (!l) {
+		return NULL; /* StopIteration (no error set) */
+	}
+	pl = pathlen(l);
+	return PyBytes_FromStringAndSize(l->start, pl);
+}
+
+#ifdef IS_PY3K
+#define LAZYMANIFESTKEYSITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT
+#else
+#define LAZYMANIFESTKEYSITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT \
+ | Py_TPFLAGS_HAVE_ITER
+#endif
+
+static PyTypeObject lazymanifestKeysIterator = { /* yields path bytes over a snapshot copy */
+	PyVarObject_HEAD_INIT(NULL, 0)
+	"parsers.lazymanifest.keysiterator", /*tp_name */
+	sizeof(lmIter),                  /*tp_basicsize */
+	0,                               /*tp_itemsize */
+	lmiter_dealloc,                  /*tp_dealloc */
+	0,                               /*tp_print */
+	0,                               /*tp_getattr */
+	0,                               /*tp_setattr */
+	0,                               /*tp_compare */
+	0,                               /*tp_repr */
+	0,                               /*tp_as_number */
+	0,                               /*tp_as_sequence */
+	0,                               /*tp_as_mapping */
+	0,                               /*tp_hash */
+	0,                               /*tp_call */
+	0,                               /*tp_str */
+	0,                               /*tp_getattro */
+	0,                               /*tp_setattro */
+	0,                               /*tp_as_buffer */
+	LAZYMANIFESTKEYSITERATOR_TPFLAGS, /* tp_flags */
+	"Keys iterator for a lazymanifest.", /* tp_doc */
+	0,                               /* tp_traverse */
+	0,                               /* tp_clear */
+	0,                               /* tp_richcompare */
+	0,                               /* tp_weaklistoffset */
+	PyObject_SelfIter,               /* tp_iter: __iter__() method */
+	lmiter_iterkeysnext,             /* tp_iternext: next() method */
+};
+
+static lazymanifest *lazymanifest_copy(lazymanifest *self);
+
+static PyObject *lazymanifest_getentriesiter(lazymanifest *self) /* iterentries(): snapshot iterator of 3-tuples */
+{
+	lmIter *i = NULL;
+	lazymanifest *t = lazymanifest_copy(self); /* iterate a copy so later mutations don't affect us */
+	if (!t) {
+		PyErr_NoMemory();
+		return NULL;
+	}
+	i = PyObject_New(lmIter, &lazymanifestEntriesIterator);
+	if (i) {
+		i->m = t; /* iterator owns the copy's reference */
+		i->pos = -1;
+	} else {
+		Py_DECREF(t);
+		PyErr_NoMemory();
+	}
+	return (PyObject *)i;
+}
+
+static PyObject *lazymanifest_getkeysiter(lazymanifest *self) /* iterkeys() / tp_iter: snapshot iterator of paths */
+{
+	lmIter *i = NULL;
+	lazymanifest *t = lazymanifest_copy(self); /* iterate a copy so later mutations don't affect us */
+	if (!t) {
+		PyErr_NoMemory();
+		return NULL;
+	}
+	i = PyObject_New(lmIter, &lazymanifestKeysIterator);
+	if (i) {
+		i->m = t; /* iterator owns the copy's reference */
+		i->pos = -1;
+	} else {
+		Py_DECREF(t);
+		PyErr_NoMemory();
+	}
+	return (PyObject *)i;
+}
+
+/* __getitem__ and __setitem__ support */
+
+static Py_ssize_t lazymanifest_size(lazymanifest *self) /* len(): deleted entries excluded */
+{
+	return self->livelines;
+}
+
+static int linecmp(const void *left, const void *right) /* qsort/bsearch comparator: order by path */
+{
+	return strcmp(((const line *)left)->start,
+		      ((const line *)right)->start); /* stops at the NUL terminating the path */
+}
+
+static PyObject *lazymanifest_getitem(lazymanifest *self, PyObject *key) /* mp_subscript: m[path] -> (node, flags) */
+{
+	line needle;
+	line *hit;
+	if (!PyBytes_Check(key)) {
+		PyErr_Format(PyExc_TypeError,
+			     "getitem: manifest keys must be a string.");
+		return NULL;
+	}
+	needle.start = PyBytes_AsString(key); /* only .start is read by linecmp */
+	hit = bsearch(&needle, self->lines, self->numlines, sizeof(line),
+		      &linecmp); /* lines stay sorted, deleted ones included */
+	if (!hit || hit->deleted) {
+		PyErr_Format(PyExc_KeyError, "No such manifest entry.");
+		return NULL;
+	}
+	return hashflags(hit);
+}
+
+static int lazymanifest_delitem(lazymanifest *self, PyObject *key) /* del m[path]: lazy delete via flag */
+{
+	line needle;
+	line *hit;
+	if (!PyBytes_Check(key)) {
+		PyErr_Format(PyExc_TypeError,
+			     "delitem: manifest keys must be a string.");
+		return -1;
+	}
+	needle.start = PyBytes_AsString(key);
+	hit = bsearch(&needle, self->lines, self->numlines, sizeof(line),
+		      &linecmp);
+	if (!hit || hit->deleted) {
+		PyErr_Format(PyExc_KeyError,
+			     "Tried to delete nonexistent manifest entry.");
+		return -1;
+	}
+	self->dirty = true; /* entry stays in the index until compact() */
+	hit->deleted = true;
+	self->livelines--;
+	return 0;
+}
+
+/* Do a binary search for the insertion point for new, creating the
+ * new entry if needed. */
+static int internalsetitem(lazymanifest *self, line *new) {
+	int start = 0, end = self->numlines;
+	while (start < end) { /* binary search over all lines, deleted ones included */
+		int pos = start + (end - start) / 2;
+		int c = linecmp(new, self->lines + pos);
+		if (c < 0)
+			end = pos;
+		else if (c > 0)
+			start = pos + 1;
+		else {
+			/* same path: replace in place */
+			if (self->lines[pos].deleted)
+				self->livelines++; /* resurrecting a deleted entry */
+			if (self->lines[pos].from_malloc)
+				free(self->lines[pos].start); /* release the buffer we are replacing */
+			start = pos;
+			goto finish;
+		}
+	}
+	/* being here means we need to do an insert */
+	if (!realloc_if_full(self)) {
+		PyErr_NoMemory(); /* caller still owns new->start */
+		return -1;
+	}
+	memmove(self->lines + start + 1, self->lines + start,
+		(self->numlines - start) * sizeof(line)); /* shift the tail up one slot */
+	self->numlines++;
+	self->livelines++;
+finish:
+	self->lines[start] = *new; /* takes ownership of new->start */
+	self->dirty = true;
+	return 0;
+}
+
+static int lazymanifest_setitem(
+	lazymanifest *self, PyObject *key, PyObject *value) /* m[path] = (node, flags); value==NULL means delete */
+{
+	char *path;
+	Py_ssize_t plen;
+	PyObject *pyhash;
+	Py_ssize_t hlen;
+	char *hash;
+	PyObject *pyflags;
+	char *flags;
+	Py_ssize_t flen;
+	size_t dlen;
+	char *dest;
+	int i;
+	line new;
+	if (!PyBytes_Check(key)) {
+		PyErr_Format(PyExc_TypeError,
+			     "setitem: manifest keys must be a string.");
+		return -1;
+	}
+	if (!value) {
+		return lazymanifest_delitem(self, key);
+	}
+	if (!PyTuple_Check(value) || PyTuple_Size(value) != 2) {
+		PyErr_Format(PyExc_TypeError,
+			     "Manifest values must be a tuple of (node, flags).");
+		return -1;
+	}
+	if (PyBytes_AsStringAndSize(key, &path, &plen) == -1) {
+		return -1;
+	}
+
+	pyhash = PyTuple_GetItem(value, 0);
+	if (!PyBytes_Check(pyhash)) {
+		PyErr_Format(PyExc_TypeError,
+			     "node must be a 20-byte string");
+		return -1;
+	}
+	hlen = PyBytes_Size(pyhash);
+	/* Some parts of the codebase try and set 21 or 22
+	 * byte "hash" values in order to perturb things for
+	 * status. We have to preserve at least the 21st
+	 * byte. Sigh. If there's a 22nd byte, we drop it on
+	 * the floor, which works fine.
+	 */
+	if (hlen != 20 && hlen != 21 && hlen != 22) {
+		PyErr_Format(PyExc_TypeError,
+			     "node must be a 20-byte string");
+		return -1;
+	}
+	hash = PyBytes_AsString(pyhash);
+
+	pyflags = PyTuple_GetItem(value, 1);
+	if (!PyBytes_Check(pyflags) || PyBytes_Size(pyflags) > 1) {
+		PyErr_Format(PyExc_TypeError,
+			     "flags must a 0 or 1 byte string");
+		return -1;
+	}
+	if (PyBytes_AsStringAndSize(pyflags, &flags, &flen) == -1) {
+		return -1;
+	}
+	/* one null byte and one newline */
+	dlen = plen + 41 + flen + 1; /* path + NUL + 40 hex + flags + '\n' */
+	dest = malloc(dlen);
+	if (!dest) {
+		PyErr_NoMemory();
+		return -1;
+	}
+	memcpy(dest, path, plen + 1); /* +1 copies the NUL terminator too */
+	for (i = 0; i < 20; i++) {
+		/* Cast to unsigned, so it will not get sign-extended when promoted
+		 * to int (as is done when passing to a variadic function)
+		 */
+		sprintf(dest + plen + 1 + (i * 2), "%02x", (unsigned char)hash[i]);
+	}
+	memcpy(dest + plen + 41, flags, flen); /* overwrites sprintf's trailing NUL */
+	dest[plen + 41 + flen] = '\n';
+	new.start = dest;
+	new.len = dlen;
+	new.hash_suffix = '\0';
+	if (hlen > 20) {
+		new.hash_suffix = hash[20]; /* preserve the 21st byte out-of-line */
+	}
+	new.from_malloc = true; /* is `start` a pointer we allocated? */
+	new.deleted = false; /* is this entry deleted? */
+	if (internalsetitem(self, &new)) {
+		free(dest); return -1; /* entry was not stored; free our buffer to avoid the original leak */
+	}
+	return 0;
+}
+
+static PyMappingMethods lazymanifest_mapping_methods = { /* dict-like protocol */
+	(lenfunc)lazymanifest_size,             /* mp_length */
+	(binaryfunc)lazymanifest_getitem,       /* mp_subscript */
+	(objobjargproc)lazymanifest_setitem,    /* mp_ass_subscript */
+};
+
+/* sequence methods (important or __contains__ builds an iterator) */
+
+static int lazymanifest_contains(lazymanifest *self, PyObject *key) /* sq_contains: path in m */
+{
+	line needle;
+	line *hit;
+	if (!PyBytes_Check(key)) {
+		/* Our keys are always strings, so if the contains
+		 * check is for a non-string, just return false. */
+		return 0;
+	}
+	needle.start = PyBytes_AsString(key);
+	hit = bsearch(&needle, self->lines, self->numlines, sizeof(line),
+		      &linecmp);
+	if (!hit || hit->deleted) { /* lazily-deleted entries count as absent */
+		return 0;
+	}
+	return 1;
+}
+
+static PySequenceMethods lazymanifest_seq_meths = { /* only sq_contains matters; see comment above */
+	(lenfunc)lazymanifest_size, /* sq_length */
+	0, /* sq_concat */
+	0, /* sq_repeat */
+	0, /* sq_item */
+	0, /* sq_slice */
+	0, /* sq_ass_item */
+	0, /* sq_ass_slice */
+	(objobjproc)lazymanifest_contains, /* sq_contains */
+	0, /* sq_inplace_concat */
+	0, /* sq_inplace_repeat */
+};
+
+
+/* Other methods (copy, diff, etc) */
+static PyTypeObject lazymanifestType;
+
+/* If the manifest has changes, build the new manifest text and reindex it. */
+/* If the manifest has changes, build the new manifest text and reindex it. */
+static int compact(lazymanifest *self) {
+	int i;
+	ssize_t need = 0;
+	char *data;
+	line *src, *dst;
+	PyObject *pydata;
+	if (!self->dirty)
+		return 0; /* nothing to fold back in */
+	for (i = 0; i < self->numlines; i++) {
+		if (!self->lines[i].deleted) {
+			need += self->lines[i].len;
+		}
+	}
+	pydata = PyBytes_FromStringAndSize(NULL, need); /* uninitialized bytes, filled below */
+	if (!pydata)
+		return -1;
+	data = PyBytes_AsString(pydata);
+	if (!data) {
+		return -1; /* NOTE(review): pydata appears to leak on this path -- confirm */
+	}
+	src = self->lines;
+	dst = self->lines; /* compacted in place: dst trails src */
+	for (i = 0; i < self->numlines; i++, src++) {
+		char *tofree = NULL;
+		if (src->from_malloc) {
+			tofree = src->start; /* every malloc'ed buffer is released, kept or not */
+		}
+		if (!src->deleted) {
+			memcpy(data, src->start, src->len);
+			*dst = *src; /* hash_suffix and len carried over */
+			dst->start = data; /* now points into the new pydata */
+			dst->from_malloc = false;
+			data += dst->len;
+			dst++;
+		}
+		free(tofree);
+	}
+	Py_DECREF(self->pydata);
+	self->pydata = pydata;
+	self->numlines = self->livelines;
+	self->dirty = false;
+	return 0;
+}
+
+static PyObject *lazymanifest_text(lazymanifest *self) /* text(): serialized manifest bytes */
+{
+	if (compact(self) != 0) { /* compact() only fails on allocation */
+		PyErr_NoMemory();
+		return NULL;
+	}
+	Py_INCREF(self->pydata); /* after compact, pydata is exactly the current text */
+	return self->pydata;
+}
+
+static lazymanifest *lazymanifest_copy(lazymanifest *self) /* copy(): independent line index sharing pydata */
+{
+	lazymanifest *copy = NULL;
+	if (compact(self) != 0) {
+		goto nomem;
+	}
+	copy = PyObject_New(lazymanifest, &lazymanifestType);
+	if (!copy) {
+		goto nomem;
+	}
+	copy->numlines = 0; copy->pydata = NULL; /* PyObject_New does not zero: make dealloc safe before any goto nomem */
+	copy->livelines = self->livelines; copy->dirty = false;
+	copy->lines = malloc(self->maxlines * sizeof(line));
+	if (!copy->lines) {
+		goto nomem; /* dealloc sees numlines==0, pydata==NULL */
+	}
+	copy->numlines = self->numlines;
+	memcpy(copy->lines, self->lines, self->numlines * sizeof(line)); /* post-compact, no from_malloc lines to double-free */
+	copy->maxlines = self->maxlines;
+	copy->pydata = self->pydata;
+	Py_INCREF(copy->pydata); /* both objects share the backing bytes */
+	return copy;
+nomem:
+	PyErr_NoMemory();
+	Py_XDECREF(copy);
+	return NULL;
+}
+
+static lazymanifest *lazymanifest_filtercopy(
+	lazymanifest *self, PyObject *matchfn) /* filtercopy(matchfn): copy keeping only lines matchfn accepts */
+{
+	lazymanifest *copy = NULL;
+	int i;
+	if (!PyCallable_Check(matchfn)) {
+		PyErr_SetString(PyExc_TypeError, "matchfn must be callable");
+		return NULL;
+	}
+	/* compact ourselves first to avoid double-frees later when we
+	 * compact tmp so that it doesn't have random pointers to our
+	 * underlying from_malloc-data (self->pydata is safe) */
+	if (compact(self) != 0) {
+		goto nomem;
+	}
+	copy = PyObject_New(lazymanifest, &lazymanifestType);
+	if (!copy) {
+		goto nomem;
+	}
+	copy->dirty = true;
+	copy->numlines = 0; copy->pydata = NULL; /* PyObject_New does not zero: make dealloc safe before goto nomem */
+	copy->lines = malloc(self->maxlines * sizeof(line));
+	if (!copy->lines) {
+		goto nomem;
+	}
+	copy->maxlines = self->maxlines;
+	copy->pydata = self->pydata;
+	Py_INCREF(self->pydata);
+	for (i = 0; i < self->numlines; i++) {
+		PyObject *arglist = NULL, *result = NULL;
+		arglist = Py_BuildValue("(s)", self->lines[i].start); /* NOTE(review): "s" builds str on py3; matchfn presumably expects bytes -- confirm */
+		if (!arglist) {
+			Py_DECREF(copy); return NULL; /* was leaking the partially-built copy */
+		}
+		result = PyObject_CallObject(matchfn, arglist);
+		Py_DECREF(arglist);
+		/* if the callback raised an exception, just let it
+		 * through and give up */
+		if (!result) {
+			/* dealloc frees copy->lines and releases pydata */
+			Py_DECREF(copy);
+			return NULL;
+		}
+		if (PyObject_IsTrue(result)) {
+			assert(!(self->lines[i].from_malloc)); /* guaranteed by compact() above */
+			copy->lines[copy->numlines++] = self->lines[i];
+		}
+		Py_DECREF(result);
+	}
+	copy->livelines = copy->numlines;
+	return copy;
+nomem:
+	PyErr_NoMemory();
+	Py_XDECREF(copy);
+	return NULL;
+}
+
+static PyObject *lazymanifest_diff(lazymanifest *self, PyObject *args) /* diff(other[, clean]) -> {path: ((n1,f1),(n2,f2))} */
+{
+	lazymanifest *other;
+	PyObject *pyclean = NULL;
+	bool listclean;
+	PyObject *emptyTup = NULL, *ret = NULL;
+	PyObject *es;
+	int sneedle = 0, oneedle = 0; /* merge cursors into self and other */
+	if (!PyArg_ParseTuple(args, "O!|O", &lazymanifestType, &other, &pyclean)) {
+		return NULL;
+	}
+	listclean = (!pyclean) ? false : PyObject_IsTrue(pyclean);
+	es = PyBytes_FromString("");
+	if (!es) {
+		goto nomem;
+	}
+	emptyTup = PyTuple_Pack(2, Py_None, es); /* stands in for "absent on this side" */
+	Py_DECREF(es);
+	if (!emptyTup) {
+		goto nomem;
+	}
+	ret = PyDict_New();
+	if (!ret) {
+		goto nomem;
+	}
+	while (sneedle != self->numlines || oneedle != other->numlines) {
+		line *left = self->lines + sneedle; /* may be one-past-the-end; only deref after bounds check */
+		line *right = other->lines + oneedle;
+		int result;
+		PyObject *key;
+		PyObject *outer;
+		/* If we're looking at a deleted entry and it's not
+		 * the end of the manifest, just skip it. */
+		if (sneedle < self->numlines && left->deleted) { /* bounds check first: fixes OOB read of left->deleted */
+			sneedle++;
+			continue;
+		}
+		if (oneedle < other->numlines && right->deleted) { /* bounds check first, as above */
+			oneedle++;
+			continue;
+		}
+		/* if we're at the end of either manifest, then we
+		 * know the remaining items are adds so we can skip
+		 * the strcmp. */
+		if (sneedle == self->numlines) {
+			result = 1;
+		} else if (oneedle == other->numlines) {
+			result = -1;
+		} else {
+			result = linecmp(left, right);
+		}
+		key = result <= 0 ?
+			PyBytes_FromString(left->start) :
+			PyBytes_FromString(right->start);
+		if (!key)
+			goto nomem;
+		if (result < 0) {
+			/* only in self: (hashflags, empty) */
+			PyObject *l = hashflags(left);
+			if (!l) {
+				goto nomem;
+			}
+			outer = PyTuple_Pack(2, l, emptyTup);
+			Py_DECREF(l);
+			if (!outer) {
+				goto nomem;
+			}
+			PyDict_SetItem(ret, key, outer);
+			Py_DECREF(outer);
+			sneedle++;
+		} else if (result > 0) {
+			/* only in other: (empty, hashflags) */
+			PyObject *r = hashflags(right);
+			if (!r) {
+				goto nomem;
+			}
+			outer = PyTuple_Pack(2, emptyTup, r);
+			Py_DECREF(r);
+			if (!outer) {
+				goto nomem;
+			}
+			PyDict_SetItem(ret, key, outer);
+			Py_DECREF(outer);
+			oneedle++;
+		} else {
+			/* file exists in both manifests */
+			if (left->len != right->len
+			    || memcmp(left->start, right->start, left->len)
+			    || left->hash_suffix != right->hash_suffix) {
+				PyObject *l = hashflags(left);
+				PyObject *r;
+				if (!l) {
+					goto nomem;
+				}
+				r = hashflags(right);
+				if (!r) {
+					Py_DECREF(l);
+					goto nomem;
+				}
+				outer = PyTuple_Pack(2, l, r);
+				Py_DECREF(l);
+				Py_DECREF(r);
+				if (!outer) {
+					goto nomem;
+				}
+				PyDict_SetItem(ret, key, outer);
+				Py_DECREF(outer);
+			} else if (listclean) {
+				PyDict_SetItem(ret, key, Py_None); /* unchanged, reported only when clean requested */
+			}
+			sneedle++;
+			oneedle++;
+		}
+		Py_DECREF(key);
+	}
+	Py_DECREF(emptyTup);
+	return ret;
+nomem:
+	PyErr_NoMemory();
+	Py_XDECREF(ret);
+	Py_XDECREF(emptyTup);
+	return NULL;
+}
+
+static PyMethodDef lazymanifest_methods[] = { /* public method table */
+	{"iterkeys", (PyCFunction)lazymanifest_getkeysiter, METH_NOARGS,
+	 "Iterate over file names in this lazymanifest."},
+	{"iterentries", (PyCFunction)lazymanifest_getentriesiter, METH_NOARGS,
+	 "Iterate over (path, nodeid, flags) tuples in this lazymanifest."},
+	{"copy", (PyCFunction)lazymanifest_copy, METH_NOARGS,
+	 "Make a copy of this lazymanifest."},
+	{"filtercopy", (PyCFunction)lazymanifest_filtercopy, METH_O,
+	 "Make a copy of this manifest filtered by matchfn."},
+	{"diff", (PyCFunction)lazymanifest_diff, METH_VARARGS,
+	 "Compare this lazymanifest to another one."},
+	{"text", (PyCFunction)lazymanifest_text, METH_NOARGS,
+	 "Encode this manifest to text."},
+	{NULL}, /* sentinel */
+};
+
+#ifdef IS_PY3K
+#define LAZYMANIFEST_TPFLAGS Py_TPFLAGS_DEFAULT
+#else
+#define LAZYMANIFEST_TPFLAGS Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_SEQUENCE_IN
+#endif
+
+static PyTypeObject lazymanifestType = {
+	PyVarObject_HEAD_INIT(NULL, 0)
+	"parsers.lazymanifest",                           /* tp_name */
+	sizeof(lazymanifest),                             /* tp_basicsize */
+	0,                                                /* tp_itemsize */
+	(destructor)lazymanifest_dealloc,                 /* tp_dealloc */
+	0,                                                /* tp_print */
+	0,                                                /* tp_getattr */
+	0,                                                /* tp_setattr */
+	0,                                                /* tp_compare */
+	0,                                                /* tp_repr */
+	0,                                                /* tp_as_number */
+	&lazymanifest_seq_meths,                          /* tp_as_sequence */
+	&lazymanifest_mapping_methods,                    /* tp_as_mapping */
+	0,                                                /* tp_hash */
+	0,                                                /* tp_call */
+	0,                                                /* tp_str */
+	0,                                                /* tp_getattro */
+	0,                                                /* tp_setattro */
+	0,                                                /* tp_as_buffer */
+	LAZYMANIFEST_TPFLAGS,                             /* tp_flags */
+	"parse manifest bytes lazily into (path, node, flags) entries", /* tp_doc: was the placeholder "TODO(augie)" */
+	0,                                                /* tp_traverse */
+	0,                                                /* tp_clear */
+	0,                                                /* tp_richcompare */
+	0,                                                /* tp_weaklistoffset */
+	(getiterfunc)lazymanifest_getkeysiter,            /* tp_iter */
+	0,                                                /* tp_iternext */
+	lazymanifest_methods,                             /* tp_methods */
+	0,                                                /* tp_members */
+	0,                                                /* tp_getset */
+	0,                                                /* tp_base */
+	0,                                                /* tp_dict */
+	0,                                                /* tp_descr_get */
+	0,                                                /* tp_descr_set */
+	0,                                                /* tp_dictoffset */
+	(initproc)lazymanifest_init,                      /* tp_init */
+	0,                                                /* tp_alloc */
+};
+
+void manifest_module_init(PyObject * mod) /* register parsers.lazymanifest on `mod` */
+{
+	lazymanifestType.tp_new = PyType_GenericNew;
+	if (PyType_Ready(&lazymanifestType) < 0) /* NOTE(review): silent failure; exception is left set */
+		return;
+	Py_INCREF(&lazymanifestType); /* PyModule_AddObject steals this reference */
+
+	PyModule_AddObject(mod, "lazymanifest",
+			   (PyObject *)&lazymanifestType);
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/mpatch.c Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,200 @@
+/*
+ mpatch.c - efficient binary patching for Mercurial
+
+ This implements a patch algorithm that's O(m + nlog n) where m is the
+ size of the output and n is the number of patches.
+
+ Given a list of binary patches, it unpacks each into a hunk list,
+ then combines the hunk lists with a treewise recursion to form a
+ single hunk list. This hunk list is then applied to the original
+ text.
+
+ The text (or binary) fragments are copied directly from their source
+ Python objects into a preallocated output string to avoid the
+ allocation of intermediate Python objects. Working memory is about 2x
+ the total number of hunks.
+
+ Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
+
+ This software may be used and distributed according to the terms
+ of the GNU General Public License, incorporated herein by reference.
+*/
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "util.h"
+#include "bitmanipulation.h"
+#include "compat.h"
+#include "mpatch.h"
+
+static char mpatch_doc[] = "Efficient binary patching.";
+static PyObject *mpatch_Error;
+
+static void setpyerr(int r) /* map MPATCH_ERR_* codes to Python exceptions */
+{
+	switch (r) {
+	case MPATCH_ERR_NO_MEM:
+		PyErr_NoMemory();
+		break;
+	case MPATCH_ERR_CANNOT_BE_DECODED:
+		PyErr_SetString(mpatch_Error, "patch cannot be decoded");
+		break;
+	case MPATCH_ERR_INVALID_PATCH:
+		PyErr_SetString(mpatch_Error, "invalid patch");
+		break;
+	} /* unknown codes intentionally leave no exception set */
+}
+
+struct mpatch_flist *cpygetitem(void *bins, ssize_t pos) /* mpatch_fold callback: decode bins[pos] into a hunk list; NOTE(review): file-local only, could be static -- confirm no external declaration */
+{
+	const char *buffer;
+	struct mpatch_flist *res;
+	ssize_t blen;
+	int r;
+
+	PyObject *tmp = PyList_GetItem((PyObject*)bins, pos); /* borrowed reference */
+	if (!tmp)
+		return NULL;
+	if (PyObject_AsCharBuffer(tmp, &buffer, (Py_ssize_t*)&blen))
+		return NULL;
+	if ((r = mpatch_decode(buffer, blen, &res)) < 0) {
+		if (!PyErr_Occurred()) /* don't clobber an already-set exception */
+			setpyerr(r);
+		return NULL;
+	}
+	return res;
+}
+
+static PyObject *
+patches(PyObject *self, PyObject *args) /* patches(text, bins) -> patched bytes */
+{
+	PyObject *text, *bins, *result;
+	struct mpatch_flist *patch;
+	const char *in;
+	int r = 0;
+	char *out;
+	Py_ssize_t len, outlen, inlen;
+
+	if (!PyArg_ParseTuple(args, "OO:mpatch", &text, &bins))
+		return NULL;
+
+	len = PyList_Size(bins);
+	if (!len) {
+		/* nothing to do */
+		Py_INCREF(text);
+		return text;
+	}
+
+	if (PyObject_AsCharBuffer(text, &in, &inlen))
+		return NULL;
+
+	patch = mpatch_fold(bins, cpygetitem, 0, len); /* combine all hunk lists into one */
+	if (!patch) { /* error already set or memory error */
+		if (!PyErr_Occurred())
+			PyErr_NoMemory();
+		return NULL;
+	}
+
+	outlen = mpatch_calcsize(inlen, patch); /* negative value is an MPATCH_ERR_* code */
+	if (outlen < 0) {
+		r = (int)outlen;
+		result = NULL;
+		goto cleanup;
+	}
+	result = PyBytes_FromStringAndSize(NULL, outlen); /* preallocate; filled in place below */
+	if (!result) {
+		result = NULL;
+		goto cleanup;
+	}
+	out = PyBytes_AsString(result);
+	if ((r = mpatch_apply(out, in, inlen, patch)) < 0) {
+		Py_DECREF(result);
+		result = NULL;
+	}
+cleanup:
+	mpatch_lfree(patch);
+	if (!result && !PyErr_Occurred()) /* translate r only if nothing raised yet */
+		setpyerr(r);
+	return result;
+}
+
+/* calculate size of a patched file directly */
+/* calculate size of a patched file directly */
+static PyObject *
+patchedsize(PyObject *self, PyObject *args) /* patchedsize(orig, bin) -> long */
+{
+	long orig, start, end, len, outlen = 0, last = 0, pos = 0;
+	Py_ssize_t patchlen;
+	char *bin;
+
+	if (!PyArg_ParseTuple(args, "ls#", &orig, &bin, &patchlen))
+		return NULL;
+
+	while (pos >= 0 && pos + 12 <= patchlen) { /* need a full 12-byte hunk header; was `pos < patchlen`, which read out of bounds on truncated input */
+		start = getbe32(bin + pos);
+		end = getbe32(bin + pos + 4);
+		len = getbe32(bin + pos + 8);
+		if (start > end)
+			break; /* sanity check */
+		pos += 12 + len; /* skip header and replacement data */
+		outlen += start - last; /* untouched span copied from the original */
+		last = end;
+		outlen += len; /* replacement data */
+	}
+
+	if (pos != patchlen) { /* truncated or inconsistent patch */
+		if (!PyErr_Occurred())
+			PyErr_SetString(mpatch_Error, "patch cannot be decoded");
+		return NULL;
+	}
+
+	outlen += orig - last; /* tail of the original after the last hunk */
+	return Py_BuildValue("l", outlen);
+}
+
+static PyMethodDef methods[] = {
+	{"patches", patches, METH_VARARGS, "apply a series of patches\n"},
+	{"patchedsize", patchedsize, METH_VARARGS, "calculate patched size\n"}, /* fixed docstring typo "calculed" */
+	{NULL, NULL}
+};
+
+static const int version = 1;
+
+#ifdef IS_PY3K
+static struct PyModuleDef mpatch_module = {
+	PyModuleDef_HEAD_INIT,
+	"mpatch",
+	mpatch_doc,
+	-1, /* no per-module state */
+	methods
+};
+
+PyMODINIT_FUNC PyInit_mpatch(void)
+{
+	PyObject *m;
+
+	m = PyModule_Create(&mpatch_module);
+	if (m == NULL)
+		return NULL;
+
+	mpatch_Error = PyErr_NewException("mercurial.cext.mpatch.mpatchError",
+					  NULL, NULL);
+	Py_INCREF(mpatch_Error); /* keep our own reference; AddObject steals one */
+	PyModule_AddObject(m, "mpatchError", mpatch_Error);
+	PyModule_AddIntConstant(m, "version", version);
+
+	return m;
+}
+#else
+PyMODINIT_FUNC
+initmpatch(void)
+{
+	PyObject *m; /* NOTE(review): m is not NULL-checked here */
+	m = Py_InitModule3("mpatch", methods, mpatch_doc);
+	mpatch_Error = PyErr_NewException("mercurial.cext.mpatch.mpatchError",
+					  NULL, NULL); /* NOTE(review): unlike py3 branch, never added to the module -- confirm intended */
+	PyModule_AddIntConstant(m, "version", version);
+}
+#endif
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/osutil.c Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,1335 @@
+/*
+ osutil.c - native operating system services
+
+ Copyright 2007 Matt Mackall and others
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+*/
+
+#define _ATFILE_SOURCE
+#include <Python.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <errno.h>
+
+#ifdef _WIN32
+#include <windows.h>
+#include <io.h>
+#else
+#include <dirent.h>
+#include <sys/socket.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+#ifdef HAVE_LINUX_STATFS
+#include <linux/magic.h>
+#include <sys/vfs.h>
+#endif
+#ifdef HAVE_BSD_STATFS
+#include <sys/mount.h>
+#include <sys/param.h>
+#endif
+#endif
+
+#ifdef __APPLE__
+#include <sys/attr.h>
+#include <sys/vnode.h>
+#endif
+
+#include "util.h"
+
+/* some platforms lack the PATH_MAX definition (eg. GNU/Hurd) */
+#ifndef PATH_MAX
+#define PATH_MAX 4096
+#endif
+
+#ifdef _WIN32
+/*
+stat struct compatible with hg expectations
+Mercurial only uses st_mode, st_size and st_mtime
+the rest is kept to minimize changes between implementations
+*/
+struct hg_stat {
+ int st_dev;
+ int st_mode;
+ int st_nlink;
+ __int64 st_size;
+ int st_mtime;
+ int st_ctime;
+};
+struct listdir_stat {
+ PyObject_HEAD
+ struct hg_stat st;
+};
+#else
+struct listdir_stat {
+ PyObject_HEAD
+ struct stat st;
+};
+#endif
+
+#ifdef IS_PY3K
+#define listdir_slot(name) \
+ static PyObject *listdir_stat_##name(PyObject *self, void *x) \
+ { \
+ return PyLong_FromLong(((struct listdir_stat *)self)->st.name); \
+ }
+#else
+#define listdir_slot(name) \
+ static PyObject *listdir_stat_##name(PyObject *self, void *x) \
+ { \
+ return PyInt_FromLong(((struct listdir_stat *)self)->st.name); \
+ }
+#endif
+
+listdir_slot(st_dev)
+listdir_slot(st_mode)
+listdir_slot(st_nlink)
+#ifdef _WIN32
+static PyObject *listdir_stat_st_size(PyObject *self, void *x)
+{
+ return PyLong_FromLongLong(
+ (PY_LONG_LONG)((struct listdir_stat *)self)->st.st_size);
+}
+#else
+listdir_slot(st_size)
+#endif
+listdir_slot(st_mtime)
+listdir_slot(st_ctime)
+
+static struct PyGetSetDef listdir_stat_getsets[] = {
+ {"st_dev", listdir_stat_st_dev, 0, 0, 0},
+ {"st_mode", listdir_stat_st_mode, 0, 0, 0},
+ {"st_nlink", listdir_stat_st_nlink, 0, 0, 0},
+ {"st_size", listdir_stat_st_size, 0, 0, 0},
+ {"st_mtime", listdir_stat_st_mtime, 0, 0, 0},
+ {"st_ctime", listdir_stat_st_ctime, 0, 0, 0},
+ {0, 0, 0, 0, 0}
+};
+
+static PyObject *listdir_stat_new(PyTypeObject *t, PyObject *a, PyObject *k)
+{
+ return t->tp_alloc(t, 0);
+}
+
+static void listdir_stat_dealloc(PyObject *o)
+{
+ o->ob_type->tp_free(o);
+}
+
+static PyTypeObject listdir_stat_type = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "osutil.stat", /*tp_name*/
+ sizeof(struct listdir_stat), /*tp_basicsize*/
+ 0, /*tp_itemsize*/
+ (destructor)listdir_stat_dealloc, /*tp_dealloc*/
+ 0, /*tp_print*/
+ 0, /*tp_getattr*/
+ 0, /*tp_setattr*/
+ 0, /*tp_compare*/
+ 0, /*tp_repr*/
+ 0, /*tp_as_number*/
+ 0, /*tp_as_sequence*/
+ 0, /*tp_as_mapping*/
+ 0, /*tp_hash */
+ 0, /*tp_call*/
+ 0, /*tp_str*/
+ 0, /*tp_getattro*/
+ 0, /*tp_setattro*/
+ 0, /*tp_as_buffer*/
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/
+ "stat objects", /* tp_doc */
+ 0, /* tp_traverse */
+ 0, /* tp_clear */
+ 0, /* tp_richcompare */
+ 0, /* tp_weaklistoffset */
+ 0, /* tp_iter */
+ 0, /* tp_iternext */
+ 0, /* tp_methods */
+ 0, /* tp_members */
+ listdir_stat_getsets, /* tp_getset */
+ 0, /* tp_base */
+ 0, /* tp_dict */
+ 0, /* tp_descr_get */
+ 0, /* tp_descr_set */
+ 0, /* tp_dictoffset */
+ 0, /* tp_init */
+ 0, /* tp_alloc */
+ listdir_stat_new, /* tp_new */
+};
+
+#ifdef _WIN32
+
+static int to_python_time(const FILETIME *tm)
+{
+ /* number of seconds between epoch and January 1 1601 */
+ const __int64 a0 = (__int64)134774L * (__int64)24L * (__int64)3600L;
+ /* conversion factor from 100ns to 1s */
+ const __int64 a1 = 10000000;
+ /* explicit (int) cast to suspend compiler warnings */
+ return (int)((((__int64)tm->dwHighDateTime << 32)
+ + tm->dwLowDateTime) / a1 - a0);
+}
+
+static PyObject *make_item(const WIN32_FIND_DATAA *fd, int wantstat)
+{
+ PyObject *py_st;
+ struct hg_stat *stp;
+
+ int kind = (fd->dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY)
+ ? _S_IFDIR : _S_IFREG;
+
+ if (!wantstat)
+ return Py_BuildValue("si", fd->cFileName, kind);
+
+ py_st = PyObject_CallObject((PyObject *)&listdir_stat_type, NULL);
+ if (!py_st)
+ return NULL;
+
+ stp = &((struct listdir_stat *)py_st)->st;
+ /*
+ use kind as st_mode
+ rwx bits on Win32 are meaningless
+ and Hg does not use them anyway
+ */
+ stp->st_mode = kind;
+ stp->st_mtime = to_python_time(&fd->ftLastWriteTime);
+ stp->st_ctime = to_python_time(&fd->ftCreationTime);
+ if (kind == _S_IFREG)
+ stp->st_size = ((__int64)fd->nFileSizeHigh << 32)
+ + fd->nFileSizeLow;
+ return Py_BuildValue("siN", fd->cFileName,
+ kind, py_st);
+}
+
+static PyObject *_listdir(char *path, int plen, int wantstat, char *skip)
+{
+ PyObject *rval = NULL; /* initialize - return value */
+ PyObject *list;
+ HANDLE fh;
+ WIN32_FIND_DATAA fd;
+ char *pattern;
+
+ /* build the path + \* pattern string */
+ pattern = PyMem_Malloc(plen + 3); /* path + \* + \0 */
+ if (!pattern) {
+ PyErr_NoMemory();
+ goto error_nomem;
+ }
+ memcpy(pattern, path, plen);
+
+ if (plen > 0) {
+ char c = path[plen-1];
+ if (c != ':' && c != '/' && c != '\\')
+ pattern[plen++] = '\\';
+ }
+ pattern[plen++] = '*';
+ pattern[plen] = '\0';
+
+ fh = FindFirstFileA(pattern, &fd);
+ if (fh == INVALID_HANDLE_VALUE) {
+ PyErr_SetFromWindowsErrWithFilename(GetLastError(), path);
+ goto error_file;
+ }
+
+ list = PyList_New(0);
+ if (!list)
+ goto error_list;
+
+ do {
+ PyObject *item;
+
+ if (fd.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) {
+ if (!strcmp(fd.cFileName, ".")
+ || !strcmp(fd.cFileName, ".."))
+ continue;
+
+ if (skip && !strcmp(fd.cFileName, skip)) {
+ rval = PyList_New(0);
+ goto error;
+ }
+ }
+
+ item = make_item(&fd, wantstat);
+ if (!item)
+ goto error;
+
+ if (PyList_Append(list, item)) {
+ Py_XDECREF(item);
+ goto error;
+ }
+
+ Py_XDECREF(item);
+ } while (FindNextFileA(fh, &fd));
+
+ if (GetLastError() != ERROR_NO_MORE_FILES) {
+ PyErr_SetFromWindowsErrWithFilename(GetLastError(), path);
+ goto error;
+ }
+
+ rval = list;
+ Py_XINCREF(rval);
+error:
+ Py_XDECREF(list);
+error_list:
+ FindClose(fh);
+error_file:
+ PyMem_Free(pattern);
+error_nomem:
+ return rval;
+}
+
+#else
+
+int entkind(struct dirent *ent)
+{
+#ifdef DT_REG
+ switch (ent->d_type) {
+ case DT_REG: return S_IFREG;
+ case DT_DIR: return S_IFDIR;
+ case DT_LNK: return S_IFLNK;
+ case DT_BLK: return S_IFBLK;
+ case DT_CHR: return S_IFCHR;
+ case DT_FIFO: return S_IFIFO;
+ case DT_SOCK: return S_IFSOCK;
+ }
+#endif
+ return -1;
+}
+
+static PyObject *makestat(const struct stat *st)
+{
+ PyObject *stat;
+
+ stat = PyObject_CallObject((PyObject *)&listdir_stat_type, NULL);
+ if (stat)
+ memcpy(&((struct listdir_stat *)stat)->st, st, sizeof(*st));
+ return stat;
+}
+
+static PyObject *_listdir_stat(char *path, int pathlen, int keepstat,
+ char *skip)
+{
+ PyObject *list, *elem, *stat = NULL, *ret = NULL;
+ char fullpath[PATH_MAX + 10];
+ int kind, err;
+ struct stat st;
+ struct dirent *ent;
+ DIR *dir;
+#ifdef AT_SYMLINK_NOFOLLOW
+ int dfd = -1;
+#endif
+
+ if (pathlen >= PATH_MAX) {
+ errno = ENAMETOOLONG;
+ PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+ goto error_value;
+ }
+ strncpy(fullpath, path, PATH_MAX);
+ fullpath[pathlen] = '/';
+
+#ifdef AT_SYMLINK_NOFOLLOW
+ dfd = open(path, O_RDONLY);
+ if (dfd == -1) {
+ PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+ goto error_value;
+ }
+ dir = fdopendir(dfd);
+#else
+ dir = opendir(path);
+#endif
+ if (!dir) {
+ PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+ goto error_dir;
+ }
+
+ list = PyList_New(0);
+ if (!list)
+ goto error_list;
+
+ while ((ent = readdir(dir))) {
+ if (!strcmp(ent->d_name, ".") || !strcmp(ent->d_name, ".."))
+ continue;
+
+ kind = entkind(ent);
+ if (kind == -1 || keepstat) {
+#ifdef AT_SYMLINK_NOFOLLOW
+ err = fstatat(dfd, ent->d_name, &st,
+ AT_SYMLINK_NOFOLLOW);
+#else
+ strncpy(fullpath + pathlen + 1, ent->d_name,
+ PATH_MAX - pathlen);
+ fullpath[PATH_MAX] = '\0';
+ err = lstat(fullpath, &st);
+#endif
+ if (err == -1) {
+ /* race with file deletion? */
+ if (errno == ENOENT)
+ continue;
+ strncpy(fullpath + pathlen + 1, ent->d_name,
+ PATH_MAX - pathlen);
+ fullpath[PATH_MAX] = 0;
+ PyErr_SetFromErrnoWithFilename(PyExc_OSError,
+ fullpath);
+ goto error;
+ }
+ kind = st.st_mode & S_IFMT;
+ }
+
+ /* quit early? */
+ if (skip && kind == S_IFDIR && !strcmp(ent->d_name, skip)) {
+ ret = PyList_New(0);
+ goto error;
+ }
+
+ if (keepstat) {
+ stat = makestat(&st);
+ if (!stat)
+ goto error;
+ elem = Py_BuildValue("siN", ent->d_name, kind, stat);
+ } else
+ elem = Py_BuildValue("si", ent->d_name, kind);
+ if (!elem)
+ goto error;
+ stat = NULL;
+
+ PyList_Append(list, elem);
+ Py_DECREF(elem);
+ }
+
+ ret = list;
+ Py_INCREF(ret);
+
+error:
+ Py_DECREF(list);
+ Py_XDECREF(stat);
+error_list:
+ closedir(dir);
+ /* closedir also closes its dirfd */
+ goto error_value;
+error_dir:
+#ifdef AT_SYMLINK_NOFOLLOW
+ close(dfd);
+#endif
+error_value:
+ return ret;
+}
+
+#ifdef __APPLE__
+
+typedef struct {
+ u_int32_t length;
+ attrreference_t name;
+ fsobj_type_t obj_type;
+ struct timespec mtime;
+#if __LITTLE_ENDIAN__
+ mode_t access_mask;
+ uint16_t padding;
+#else
+ uint16_t padding;
+ mode_t access_mask;
+#endif
+ off_t size;
+} __attribute__((packed)) attrbuf_entry;
+
+int attrkind(attrbuf_entry *entry)
+{
+ switch (entry->obj_type) {
+ case VREG: return S_IFREG;
+ case VDIR: return S_IFDIR;
+ case VLNK: return S_IFLNK;
+ case VBLK: return S_IFBLK;
+ case VCHR: return S_IFCHR;
+ case VFIFO: return S_IFIFO;
+ case VSOCK: return S_IFSOCK;
+ }
+ return -1;
+}
+
+/* get these many entries at a time */
+#define LISTDIR_BATCH_SIZE 50
+
+static PyObject *_listdir_batch(char *path, int pathlen, int keepstat,
+ char *skip, bool *fallback)
+{
+ PyObject *list, *elem, *stat = NULL, *ret = NULL;
+ int kind, err;
+ unsigned long index;
+ unsigned int count, old_state, new_state;
+ bool state_seen = false;
+ attrbuf_entry *entry;
+ /* from the getattrlist(2) man page: a path can be no longer than
+ (NAME_MAX * 3 + 1) bytes. Also, "The getattrlist() function will
+ silently truncate attribute data if attrBufSize is too small." So
+ pass in a buffer big enough for the worst case. */
+ char attrbuf[LISTDIR_BATCH_SIZE * (sizeof(attrbuf_entry) + NAME_MAX * 3 + 1)];
+ unsigned int basep_unused;
+
+ struct stat st;
+ int dfd = -1;
+
+ /* these must match the attrbuf_entry struct, otherwise you'll end up
+ with garbage */
+ struct attrlist requested_attr = {0};
+ requested_attr.bitmapcount = ATTR_BIT_MAP_COUNT;
+ requested_attr.commonattr = (ATTR_CMN_NAME | ATTR_CMN_OBJTYPE |
+ ATTR_CMN_MODTIME | ATTR_CMN_ACCESSMASK);
+ requested_attr.fileattr = ATTR_FILE_DATALENGTH;
+
+ *fallback = false;
+
+ if (pathlen >= PATH_MAX) {
+ errno = ENAMETOOLONG;
+ PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+ goto error_value;
+ }
+
+ dfd = open(path, O_RDONLY);
+ if (dfd == -1) {
+ PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+ goto error_value;
+ }
+
+ list = PyList_New(0);
+ if (!list)
+ goto error_dir;
+
+ do {
+ count = LISTDIR_BATCH_SIZE;
+ err = getdirentriesattr(dfd, &requested_attr, &attrbuf,
+ sizeof(attrbuf), &count, &basep_unused,
+ &new_state, 0);
+ if (err < 0) {
+ if (errno == ENOTSUP) {
+ /* We're on a filesystem that doesn't support
+ getdirentriesattr. Fall back to the
+ stat-based implementation. */
+ *fallback = true;
+ } else
+ PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+ goto error;
+ }
+
+ if (!state_seen) {
+ old_state = new_state;
+ state_seen = true;
+ } else if (old_state != new_state) {
+ /* There's an edge case with getdirentriesattr. Consider
+ the following initial list of files:
+
+ a
+ b
+ <--
+ c
+ d
+
+ If the iteration is paused at the arrow, and b is
+ deleted before it is resumed, getdirentriesattr will
+ not return d at all! Ordinarily we're expected to
+ restart the iteration from the beginning. To avoid
+ getting stuck in a retry loop here, fall back to
+ stat. */
+ *fallback = true;
+ goto error;
+ }
+
+ entry = (attrbuf_entry *)attrbuf;
+
+ for (index = 0; index < count; index++) {
+ char *filename = ((char *)&entry->name) +
+ entry->name.attr_dataoffset;
+
+ if (!strcmp(filename, ".") || !strcmp(filename, ".."))
+ continue;
+
+ kind = attrkind(entry);
+ if (kind == -1) {
+ PyErr_Format(PyExc_OSError,
+ "unknown object type %u for file "
+ "%s%s!",
+ entry->obj_type, path, filename);
+ goto error;
+ }
+
+ /* quit early? */
+ if (skip && kind == S_IFDIR && !strcmp(filename, skip)) {
+ ret = PyList_New(0);
+ goto error;
+ }
+
+ if (keepstat) {
+ /* from the getattrlist(2) man page: "Only the
+ permission bits ... are valid". */
+ st.st_mode = (entry->access_mask & ~S_IFMT) | kind;
+ st.st_mtime = entry->mtime.tv_sec;
+ st.st_size = entry->size;
+ stat = makestat(&st);
+ if (!stat)
+ goto error;
+ elem = Py_BuildValue("siN", filename, kind, stat);
+ } else
+ elem = Py_BuildValue("si", filename, kind);
+ if (!elem)
+ goto error;
+ stat = NULL;
+
+ PyList_Append(list, elem);
+ Py_DECREF(elem);
+
+ entry = (attrbuf_entry *)((char *)entry + entry->length);
+ }
+ } while (err == 0);
+
+ ret = list;
+ Py_INCREF(ret);
+
+error:
+ Py_DECREF(list);
+ Py_XDECREF(stat);
+error_dir:
+ close(dfd);
+error_value:
+ return ret;
+}
+
+#endif /* __APPLE__ */
+
+static PyObject *_listdir(char *path, int pathlen, int keepstat, char *skip)
+{
+#ifdef __APPLE__
+ PyObject *ret;
+ bool fallback = false;
+
+ ret = _listdir_batch(path, pathlen, keepstat, skip, &fallback);
+ if (ret != NULL || !fallback)
+ return ret;
+#endif
+ return _listdir_stat(path, pathlen, keepstat, skip);
+}
+
+static PyObject *statfiles(PyObject *self, PyObject *args)
+{
+ PyObject *names, *stats;
+ Py_ssize_t i, count;
+
+ if (!PyArg_ParseTuple(args, "O:statfiles", &names))
+ return NULL;
+
+ count = PySequence_Length(names);
+ if (count == -1) {
+ PyErr_SetString(PyExc_TypeError, "not a sequence");
+ return NULL;
+ }
+
+ stats = PyList_New(count);
+ if (stats == NULL)
+ return NULL;
+
+ for (i = 0; i < count; i++) {
+ PyObject *stat, *pypath;
+ struct stat st;
+ int ret, kind;
+ char *path;
+
+ /* With a large file count or on a slow filesystem,
+ don't block signals for long (issue4878). */
+ if ((i % 1000) == 999 && PyErr_CheckSignals() == -1)
+ goto bail;
+
+ pypath = PySequence_GetItem(names, i);
+ if (!pypath)
+ goto bail;
+ path = PyBytes_AsString(pypath);
+ if (path == NULL) {
+ Py_DECREF(pypath);
+ PyErr_SetString(PyExc_TypeError, "not a string");
+ goto bail;
+ }
+ ret = lstat(path, &st);
+ Py_DECREF(pypath);
+ kind = st.st_mode & S_IFMT;
+ if (ret != -1 && (kind == S_IFREG || kind == S_IFLNK)) {
+ stat = makestat(&st);
+ if (stat == NULL)
+ goto bail;
+ PyList_SET_ITEM(stats, i, stat);
+ } else {
+ Py_INCREF(Py_None);
+ PyList_SET_ITEM(stats, i, Py_None);
+ }
+ }
+
+ return stats;
+
+bail:
+ Py_DECREF(stats);
+ return NULL;
+}
+
+/*
+ * recvfds() simply does not release GIL during blocking io operation because
+ * command server is known to be single-threaded.
+ *
+ * Old systems such as Solaris don't provide CMSG_LEN, msg_control, etc.
+ * Currently, recvfds() is not supported on these platforms.
+ */
+#ifdef CMSG_LEN
+
+static ssize_t recvfdstobuf(int sockfd, int **rfds, void *cbuf, size_t cbufsize)
+{
+ char dummy[1];
+ struct iovec iov = {dummy, sizeof(dummy)};
+ struct msghdr msgh = {0};
+ struct cmsghdr *cmsg;
+
+ msgh.msg_iov = &iov;
+ msgh.msg_iovlen = 1;
+ msgh.msg_control = cbuf;
+ msgh.msg_controllen = (socklen_t)cbufsize;
+ if (recvmsg(sockfd, &msgh, 0) < 0)
+ return -1;
+
+ for (cmsg = CMSG_FIRSTHDR(&msgh); cmsg;
+ cmsg = CMSG_NXTHDR(&msgh, cmsg)) {
+ if (cmsg->cmsg_level != SOL_SOCKET ||
+ cmsg->cmsg_type != SCM_RIGHTS)
+ continue;
+ *rfds = (int *)CMSG_DATA(cmsg);
+ return (cmsg->cmsg_len - CMSG_LEN(0)) / sizeof(int);
+ }
+
+ *rfds = cbuf;
+ return 0;
+}
+
+static PyObject *recvfds(PyObject *self, PyObject *args)
+{
+ int sockfd;
+ int *rfds = NULL;
+ ssize_t rfdscount, i;
+ char cbuf[256];
+ PyObject *rfdslist = NULL;
+
+ if (!PyArg_ParseTuple(args, "i", &sockfd))
+ return NULL;
+
+ rfdscount = recvfdstobuf(sockfd, &rfds, cbuf, sizeof(cbuf));
+ if (rfdscount < 0)
+ return PyErr_SetFromErrno(PyExc_OSError);
+
+ rfdslist = PyList_New(rfdscount);
+ if (!rfdslist)
+ goto bail;
+ for (i = 0; i < rfdscount; i++) {
+ PyObject *obj = PyLong_FromLong(rfds[i]);
+ if (!obj)
+ goto bail;
+ PyList_SET_ITEM(rfdslist, i, obj);
+ }
+ return rfdslist;
+
+bail:
+ Py_XDECREF(rfdslist);
+ return NULL;
+}
+
+#endif /* CMSG_LEN */
+
+#if defined(HAVE_SETPROCTITLE)
+/* setproctitle is the first choice - available in FreeBSD */
+#define SETPROCNAME_USE_SETPROCTITLE
+#elif (defined(__linux__) || defined(__APPLE__)) && PY_MAJOR_VERSION == 2
+/* rewrite the argv buffer in place - works in Linux and OS X. Py_GetArgcArgv
+ * in Python 3 returns the copied wchar_t **argv, thus unsupported. */
+#define SETPROCNAME_USE_ARGVREWRITE
+#else
+#define SETPROCNAME_USE_NONE
+#endif
+
+#ifndef SETPROCNAME_USE_NONE
+static PyObject *setprocname(PyObject *self, PyObject *args)
+{
+ const char *name = NULL;
+ if (!PyArg_ParseTuple(args, "s", &name))
+ return NULL;
+
+#if defined(SETPROCNAME_USE_SETPROCTITLE)
+ setproctitle("%s", name);
+#elif defined(SETPROCNAME_USE_ARGVREWRITE)
+ {
+ static char *argvstart = NULL;
+ static size_t argvsize = 0;
+ if (argvstart == NULL) {
+ int argc = 0, i;
+ char **argv = NULL;
+ char *argvend;
+ extern void Py_GetArgcArgv(int *argc, char ***argv);
+ Py_GetArgcArgv(&argc, &argv);
+
+ /* Check the memory we can use. Typically, argv[i] and
+ * argv[i + 1] are continuous. */
+ argvend = argvstart = argv[0];
+ for (i = 0; i < argc; ++i) {
+ if (argv[i] > argvend || argv[i] < argvstart)
+ break; /* not continuous */
+ size_t len = strlen(argv[i]);
+ argvend = argv[i] + len + 1 /* '\0' */;
+ }
+ if (argvend > argvstart) /* sanity check */
+ argvsize = argvend - argvstart;
+ }
+
+ if (argvstart && argvsize > 1) {
+ int n = snprintf(argvstart, argvsize, "%s", name);
+ if (n >= 0 && (size_t)n < argvsize)
+ memset(argvstart + n, 0, argvsize - n);
+ }
+ }
+#endif
+
+ Py_RETURN_NONE;
+}
+#endif /* ndef SETPROCNAME_USE_NONE */
+
+#if defined(HAVE_BSD_STATFS)
+static const char *describefstype(const struct statfs *pbuf)
+{
+ /* BSD or OSX provides a f_fstypename field */
+ return pbuf->f_fstypename;
+}
+#elif defined(HAVE_LINUX_STATFS)
+static const char *describefstype(const struct statfs *pbuf)
+{
+	/* Beginning of Linux filesystems */
+#ifdef ADFS_SUPER_MAGIC
+ if (pbuf->f_type == ADFS_SUPER_MAGIC)
+ return "adfs";
+#endif
+#ifdef AFFS_SUPER_MAGIC
+ if (pbuf->f_type == AFFS_SUPER_MAGIC)
+ return "affs";
+#endif
+#ifdef AUTOFS_SUPER_MAGIC
+ if (pbuf->f_type == AUTOFS_SUPER_MAGIC)
+ return "autofs";
+#endif
+#ifdef BDEVFS_MAGIC
+ if (pbuf->f_type == BDEVFS_MAGIC)
+ return "bdevfs";
+#endif
+#ifdef BEFS_SUPER_MAGIC
+ if (pbuf->f_type == BEFS_SUPER_MAGIC)
+ return "befs";
+#endif
+#ifdef BFS_MAGIC
+ if (pbuf->f_type == BFS_MAGIC)
+ return "bfs";
+#endif
+#ifdef BINFMTFS_MAGIC
+ if (pbuf->f_type == BINFMTFS_MAGIC)
+ return "binfmtfs";
+#endif
+#ifdef BTRFS_SUPER_MAGIC
+ if (pbuf->f_type == BTRFS_SUPER_MAGIC)
+ return "btrfs";
+#endif
+#ifdef CGROUP_SUPER_MAGIC
+ if (pbuf->f_type == CGROUP_SUPER_MAGIC)
+ return "cgroup";
+#endif
+#ifdef CIFS_MAGIC_NUMBER
+ if (pbuf->f_type == CIFS_MAGIC_NUMBER)
+ return "cifs";
+#endif
+#ifdef CODA_SUPER_MAGIC
+ if (pbuf->f_type == CODA_SUPER_MAGIC)
+ return "coda";
+#endif
+#ifdef COH_SUPER_MAGIC
+ if (pbuf->f_type == COH_SUPER_MAGIC)
+ return "coh";
+#endif
+#ifdef CRAMFS_MAGIC
+ if (pbuf->f_type == CRAMFS_MAGIC)
+ return "cramfs";
+#endif
+#ifdef DEBUGFS_MAGIC
+ if (pbuf->f_type == DEBUGFS_MAGIC)
+ return "debugfs";
+#endif
+#ifdef DEVFS_SUPER_MAGIC
+ if (pbuf->f_type == DEVFS_SUPER_MAGIC)
+ return "devfs";
+#endif
+#ifdef DEVPTS_SUPER_MAGIC
+ if (pbuf->f_type == DEVPTS_SUPER_MAGIC)
+ return "devpts";
+#endif
+#ifdef EFIVARFS_MAGIC
+ if (pbuf->f_type == EFIVARFS_MAGIC)
+ return "efivarfs";
+#endif
+#ifdef EFS_SUPER_MAGIC
+ if (pbuf->f_type == EFS_SUPER_MAGIC)
+ return "efs";
+#endif
+#ifdef EXT_SUPER_MAGIC
+ if (pbuf->f_type == EXT_SUPER_MAGIC)
+ return "ext";
+#endif
+#ifdef EXT2_OLD_SUPER_MAGIC
+ if (pbuf->f_type == EXT2_OLD_SUPER_MAGIC)
+ return "ext2";
+#endif
+#ifdef EXT2_SUPER_MAGIC
+ if (pbuf->f_type == EXT2_SUPER_MAGIC)
+ return "ext2";
+#endif
+#ifdef EXT3_SUPER_MAGIC
+ if (pbuf->f_type == EXT3_SUPER_MAGIC)
+ return "ext3";
+#endif
+#ifdef EXT4_SUPER_MAGIC
+ if (pbuf->f_type == EXT4_SUPER_MAGIC)
+ return "ext4";
+#endif
+#ifdef F2FS_SUPER_MAGIC
+ if (pbuf->f_type == F2FS_SUPER_MAGIC)
+ return "f2fs";
+#endif
+#ifdef FUSE_SUPER_MAGIC
+ if (pbuf->f_type == FUSE_SUPER_MAGIC)
+ return "fuse";
+#endif
+#ifdef FUTEXFS_SUPER_MAGIC
+ if (pbuf->f_type == FUTEXFS_SUPER_MAGIC)
+ return "futexfs";
+#endif
+#ifdef HFS_SUPER_MAGIC
+ if (pbuf->f_type == HFS_SUPER_MAGIC)
+ return "hfs";
+#endif
+#ifdef HOSTFS_SUPER_MAGIC
+ if (pbuf->f_type == HOSTFS_SUPER_MAGIC)
+ return "hostfs";
+#endif
+#ifdef HPFS_SUPER_MAGIC
+ if (pbuf->f_type == HPFS_SUPER_MAGIC)
+ return "hpfs";
+#endif
+#ifdef HUGETLBFS_MAGIC
+ if (pbuf->f_type == HUGETLBFS_MAGIC)
+ return "hugetlbfs";
+#endif
+#ifdef ISOFS_SUPER_MAGIC
+ if (pbuf->f_type == ISOFS_SUPER_MAGIC)
+ return "isofs";
+#endif
+#ifdef JFFS2_SUPER_MAGIC
+ if (pbuf->f_type == JFFS2_SUPER_MAGIC)
+ return "jffs2";
+#endif
+#ifdef JFS_SUPER_MAGIC
+ if (pbuf->f_type == JFS_SUPER_MAGIC)
+ return "jfs";
+#endif
+#ifdef MINIX_SUPER_MAGIC
+ if (pbuf->f_type == MINIX_SUPER_MAGIC)
+ return "minix";
+#endif
+#ifdef MINIX2_SUPER_MAGIC
+ if (pbuf->f_type == MINIX2_SUPER_MAGIC)
+ return "minix2";
+#endif
+#ifdef MINIX3_SUPER_MAGIC
+ if (pbuf->f_type == MINIX3_SUPER_MAGIC)
+ return "minix3";
+#endif
+#ifdef MQUEUE_MAGIC
+ if (pbuf->f_type == MQUEUE_MAGIC)
+ return "mqueue";
+#endif
+#ifdef MSDOS_SUPER_MAGIC
+ if (pbuf->f_type == MSDOS_SUPER_MAGIC)
+ return "msdos";
+#endif
+#ifdef NCP_SUPER_MAGIC
+ if (pbuf->f_type == NCP_SUPER_MAGIC)
+ return "ncp";
+#endif
+#ifdef NFS_SUPER_MAGIC
+ if (pbuf->f_type == NFS_SUPER_MAGIC)
+ return "nfs";
+#endif
+#ifdef NILFS_SUPER_MAGIC
+ if (pbuf->f_type == NILFS_SUPER_MAGIC)
+ return "nilfs";
+#endif
+#ifdef NTFS_SB_MAGIC
+ if (pbuf->f_type == NTFS_SB_MAGIC)
+ return "ntfs-sb";
+#endif
+#ifdef OCFS2_SUPER_MAGIC
+ if (pbuf->f_type == OCFS2_SUPER_MAGIC)
+ return "ocfs2";
+#endif
+#ifdef OPENPROM_SUPER_MAGIC
+ if (pbuf->f_type == OPENPROM_SUPER_MAGIC)
+ return "openprom";
+#endif
+#ifdef OVERLAYFS_SUPER_MAGIC
+ if (pbuf->f_type == OVERLAYFS_SUPER_MAGIC)
+ return "overlay";
+#endif
+#ifdef PIPEFS_MAGIC
+ if (pbuf->f_type == PIPEFS_MAGIC)
+ return "pipefs";
+#endif
+#ifdef PROC_SUPER_MAGIC
+ if (pbuf->f_type == PROC_SUPER_MAGIC)
+ return "proc";
+#endif
+#ifdef PSTOREFS_MAGIC
+ if (pbuf->f_type == PSTOREFS_MAGIC)
+ return "pstorefs";
+#endif
+#ifdef QNX4_SUPER_MAGIC
+ if (pbuf->f_type == QNX4_SUPER_MAGIC)
+ return "qnx4";
+#endif
+#ifdef QNX6_SUPER_MAGIC
+ if (pbuf->f_type == QNX6_SUPER_MAGIC)
+ return "qnx6";
+#endif
+#ifdef RAMFS_MAGIC
+ if (pbuf->f_type == RAMFS_MAGIC)
+ return "ramfs";
+#endif
+#ifdef REISERFS_SUPER_MAGIC
+ if (pbuf->f_type == REISERFS_SUPER_MAGIC)
+ return "reiserfs";
+#endif
+#ifdef ROMFS_MAGIC
+ if (pbuf->f_type == ROMFS_MAGIC)
+ return "romfs";
+#endif
+#ifdef SECURITYFS_MAGIC
+ if (pbuf->f_type == SECURITYFS_MAGIC)
+ return "securityfs";
+#endif
+#ifdef SELINUX_MAGIC
+ if (pbuf->f_type == SELINUX_MAGIC)
+ return "selinux";
+#endif
+#ifdef SMACK_MAGIC
+ if (pbuf->f_type == SMACK_MAGIC)
+ return "smack";
+#endif
+#ifdef SMB_SUPER_MAGIC
+ if (pbuf->f_type == SMB_SUPER_MAGIC)
+ return "smb";
+#endif
+#ifdef SOCKFS_MAGIC
+ if (pbuf->f_type == SOCKFS_MAGIC)
+ return "sockfs";
+#endif
+#ifdef SQUASHFS_MAGIC
+ if (pbuf->f_type == SQUASHFS_MAGIC)
+ return "squashfs";
+#endif
+#ifdef SYSFS_MAGIC
+ if (pbuf->f_type == SYSFS_MAGIC)
+ return "sysfs";
+#endif
+#ifdef SYSV2_SUPER_MAGIC
+ if (pbuf->f_type == SYSV2_SUPER_MAGIC)
+ return "sysv2";
+#endif
+#ifdef SYSV4_SUPER_MAGIC
+ if (pbuf->f_type == SYSV4_SUPER_MAGIC)
+ return "sysv4";
+#endif
+#ifdef TMPFS_MAGIC
+ if (pbuf->f_type == TMPFS_MAGIC)
+ return "tmpfs";
+#endif
+#ifdef UDF_SUPER_MAGIC
+ if (pbuf->f_type == UDF_SUPER_MAGIC)
+ return "udf";
+#endif
+#ifdef UFS_MAGIC
+ if (pbuf->f_type == UFS_MAGIC)
+ return "ufs";
+#endif
+#ifdef USBDEVICE_SUPER_MAGIC
+ if (pbuf->f_type == USBDEVICE_SUPER_MAGIC)
+ return "usbdevice";
+#endif
+#ifdef V9FS_MAGIC
+ if (pbuf->f_type == V9FS_MAGIC)
+ return "v9fs";
+#endif
+#ifdef VXFS_SUPER_MAGIC
+ if (pbuf->f_type == VXFS_SUPER_MAGIC)
+ return "vxfs";
+#endif
+#ifdef XENFS_SUPER_MAGIC
+ if (pbuf->f_type == XENFS_SUPER_MAGIC)
+ return "xenfs";
+#endif
+#ifdef XENIX_SUPER_MAGIC
+ if (pbuf->f_type == XENIX_SUPER_MAGIC)
+ return "xenix";
+#endif
+#ifdef XFS_SUPER_MAGIC
+ if (pbuf->f_type == XFS_SUPER_MAGIC)
+ return "xfs";
+#endif
+ /* End of Linux filesystems */
+ return NULL;
+}
+#endif /* def HAVE_LINUX_STATFS */
+
+#if defined(HAVE_BSD_STATFS) || defined(HAVE_LINUX_STATFS)
+/* given a directory path, return filesystem type name (best-effort) */
+static PyObject *getfstype(PyObject *self, PyObject *args)
+{
+ const char *path = NULL;
+ struct statfs buf;
+ int r;
+ if (!PyArg_ParseTuple(args, "s", &path))
+ return NULL;
+
+ memset(&buf, 0, sizeof(buf));
+ r = statfs(path, &buf);
+ if (r != 0)
+ return PyErr_SetFromErrno(PyExc_OSError);
+ return Py_BuildValue("s", describefstype(&buf));
+}
+#endif /* defined(HAVE_LINUX_STATFS) || defined(HAVE_BSD_STATFS) */
+
+#endif /* ndef _WIN32 */
+
+static PyObject *listdir(PyObject *self, PyObject *args, PyObject *kwargs)
+{
+ PyObject *statobj = NULL; /* initialize - optional arg */
+ PyObject *skipobj = NULL; /* initialize - optional arg */
+ char *path, *skip = NULL;
+ int wantstat, plen;
+
+ static char *kwlist[] = {"path", "stat", "skip", NULL};
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s#|OO:listdir",
+ kwlist, &path, &plen, &statobj, &skipobj))
+ return NULL;
+
+ wantstat = statobj && PyObject_IsTrue(statobj);
+
+ if (skipobj && skipobj != Py_None) {
+ skip = PyBytes_AsString(skipobj);
+ if (!skip)
+ return NULL;
+ }
+
+ return _listdir(path, plen, wantstat, skip);
+}
+
+#ifdef _WIN32
+static PyObject *posixfile(PyObject *self, PyObject *args, PyObject *kwds)
+{
+ static char *kwlist[] = {"name", "mode", "buffering", NULL};
+ PyObject *file_obj = NULL;
+ char *name = NULL;
+ char *mode = "rb";
+ DWORD access = 0;
+ DWORD creation;
+ HANDLE handle;
+ int fd, flags = 0;
+ int bufsize = -1;
+ char m0, m1, m2;
+ char fpmode[4];
+ int fppos = 0;
+ int plus;
+ FILE *fp;
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "et|si:posixfile", kwlist,
+ Py_FileSystemDefaultEncoding,
+ &name, &mode, &bufsize))
+ return NULL;
+
+ m0 = mode[0];
+ m1 = m0 ? mode[1] : '\0';
+ m2 = m1 ? mode[2] : '\0';
+ plus = m1 == '+' || m2 == '+';
+
+ fpmode[fppos++] = m0;
+ if (m1 == 'b' || m2 == 'b') {
+ flags = _O_BINARY;
+ fpmode[fppos++] = 'b';
+ }
+ else
+ flags = _O_TEXT;
+ if (m0 == 'r' && !plus) {
+ flags |= _O_RDONLY;
+ access = GENERIC_READ;
+ } else {
+ /*
+ work around http://support.microsoft.com/kb/899149 and
+ set _O_RDWR for 'w' and 'a', even if mode has no '+'
+ */
+ flags |= _O_RDWR;
+ access = GENERIC_READ | GENERIC_WRITE;
+ fpmode[fppos++] = '+';
+ }
+ fpmode[fppos++] = '\0';
+
+ switch (m0) {
+ case 'r':
+ creation = OPEN_EXISTING;
+ break;
+ case 'w':
+ creation = CREATE_ALWAYS;
+ break;
+ case 'a':
+ creation = OPEN_ALWAYS;
+ flags |= _O_APPEND;
+ break;
+ default:
+ PyErr_Format(PyExc_ValueError,
+ "mode string must begin with one of 'r', 'w', "
+ "or 'a', not '%c'", m0);
+ goto bail;
+ }
+
+ handle = CreateFile(name, access,
+ FILE_SHARE_READ | FILE_SHARE_WRITE |
+ FILE_SHARE_DELETE,
+ NULL,
+ creation,
+ FILE_ATTRIBUTE_NORMAL,
+ 0);
+
+ if (handle == INVALID_HANDLE_VALUE) {
+ PyErr_SetFromWindowsErrWithFilename(GetLastError(), name);
+ goto bail;
+ }
+
+ fd = _open_osfhandle((intptr_t)handle, flags);
+
+ if (fd == -1) {
+ CloseHandle(handle);
+ PyErr_SetFromErrnoWithFilename(PyExc_IOError, name);
+ goto bail;
+ }
+#ifndef IS_PY3K
+ fp = _fdopen(fd, fpmode);
+ if (fp == NULL) {
+ _close(fd);
+ PyErr_SetFromErrnoWithFilename(PyExc_IOError, name);
+ goto bail;
+ }
+
+ file_obj = PyFile_FromFile(fp, name, mode, fclose);
+ if (file_obj == NULL) {
+ fclose(fp);
+ goto bail;
+ }
+
+ PyFile_SetBufSize(file_obj, bufsize);
+#else
+ file_obj = PyFile_FromFd(fd, name, mode, bufsize, NULL, NULL, NULL, 1);
+ if (file_obj == NULL)
+ goto bail;
+#endif
+bail:
+ PyMem_Free(name);
+ return file_obj;
+}
+#endif
+
+#ifdef __APPLE__
+#include <ApplicationServices/ApplicationServices.h>
+
+static PyObject *isgui(PyObject *self)
+{
+ CFDictionaryRef dict = CGSessionCopyCurrentDictionary();
+
+ if (dict != NULL) {
+ CFRelease(dict);
+ Py_RETURN_TRUE;
+ } else {
+ Py_RETURN_FALSE;
+ }
+}
+#endif
+
+static char osutil_doc[] = "Native operating system services.";
+
+static PyMethodDef methods[] = {
+ {"listdir", (PyCFunction)listdir, METH_VARARGS | METH_KEYWORDS,
+ "list a directory\n"},
+#ifdef _WIN32
+ {"posixfile", (PyCFunction)posixfile, METH_VARARGS | METH_KEYWORDS,
+ "Open a file with POSIX-like semantics.\n"
+"On error, this function may raise either a WindowsError or an IOError."},
+#else
+ {"statfiles", (PyCFunction)statfiles, METH_VARARGS | METH_KEYWORDS,
+ "stat a series of files or symlinks\n"
+"Returns None for non-existent entries and entries of other types.\n"},
+#ifdef CMSG_LEN
+ {"recvfds", (PyCFunction)recvfds, METH_VARARGS,
+ "receive list of file descriptors via socket\n"},
+#endif
+#ifndef SETPROCNAME_USE_NONE
+ {"setprocname", (PyCFunction)setprocname, METH_VARARGS,
+ "set process title (best-effort)\n"},
+#endif
+#if defined(HAVE_BSD_STATFS) || defined(HAVE_LINUX_STATFS)
+ {"getfstype", (PyCFunction)getfstype, METH_VARARGS,
+ "get filesystem type (best-effort)\n"},
+#endif
+#endif /* ndef _WIN32 */
+#ifdef __APPLE__
+ {
+ "isgui", (PyCFunction)isgui, METH_NOARGS,
+ "Is a CoreGraphics session available?"
+ },
+#endif
+ {NULL, NULL}
+};
+
+static const int version = 1;
+
+#ifdef IS_PY3K
+static struct PyModuleDef osutil_module = {
+ PyModuleDef_HEAD_INIT,
+ "osutil",
+ osutil_doc,
+ -1,
+ methods
+};
+
+PyMODINIT_FUNC PyInit_osutil(void)
+{
+ PyObject *m;
+ if (PyType_Ready(&listdir_stat_type) < 0)
+ return NULL;
+
+ m = PyModule_Create(&osutil_module);
+ PyModule_AddIntConstant(m, "version", version);
+ return m;
+}
+#else
+PyMODINIT_FUNC initosutil(void)
+{
+ PyObject *m;
+ if (PyType_Ready(&listdir_stat_type) == -1)
+ return;
+
+ m = Py_InitModule3("osutil", methods, osutil_doc);
+ PyModule_AddIntConstant(m, "version", version);
+}
+#endif
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/parsers.c Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,1009 @@
+/*
+ parsers.c - efficient content parsing
+
+ Copyright 2008 Matt Mackall <mpm@selenic.com> and others
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+*/
+
+#include <Python.h>
+#include <ctype.h>
+#include <stddef.h>
+#include <string.h>
+
+#include "util.h"
+#include "bitmanipulation.h"
+
+#ifdef IS_PY3K
+/* The mapping of Python types is meant to be temporary to get Python
+ * 3 to compile. We should remove this once Python 3 support is fully
+ * supported and proper types are used in the extensions themselves. */
+#define PyInt_Type PyLong_Type
+#define PyInt_Check PyLong_Check
+#define PyInt_FromLong PyLong_FromLong
+#define PyInt_FromSsize_t PyLong_FromSsize_t
+#define PyInt_AS_LONG PyLong_AS_LONG
+#define PyInt_AsLong PyLong_AsLong
+#endif
+
+static const char *const versionerrortext = "Python minor version mismatch";
+
+static const char lowertable[128] = { /* identity map, except 0x41-0x5a (A-Z) -> a-z */
+ '\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07',
+ '\x08', '\x09', '\x0a', '\x0b', '\x0c', '\x0d', '\x0e', '\x0f',
+ '\x10', '\x11', '\x12', '\x13', '\x14', '\x15', '\x16', '\x17',
+ '\x18', '\x19', '\x1a', '\x1b', '\x1c', '\x1d', '\x1e', '\x1f',
+ '\x20', '\x21', '\x22', '\x23', '\x24', '\x25', '\x26', '\x27',
+ '\x28', '\x29', '\x2a', '\x2b', '\x2c', '\x2d', '\x2e', '\x2f',
+ '\x30', '\x31', '\x32', '\x33', '\x34', '\x35', '\x36', '\x37',
+ '\x38', '\x39', '\x3a', '\x3b', '\x3c', '\x3d', '\x3e', '\x3f',
+ '\x40',
+ '\x61', '\x62', '\x63', '\x64', '\x65', '\x66', '\x67', /* A-G */
+ '\x68', '\x69', '\x6a', '\x6b', '\x6c', '\x6d', '\x6e', '\x6f', /* H-O */
+ '\x70', '\x71', '\x72', '\x73', '\x74', '\x75', '\x76', '\x77', /* P-W */
+ '\x78', '\x79', '\x7a', /* X-Z */
+ '\x5b', '\x5c', '\x5d', '\x5e', '\x5f',
+ '\x60', '\x61', '\x62', '\x63', '\x64', '\x65', '\x66', '\x67',
+ '\x68', '\x69', '\x6a', '\x6b', '\x6c', '\x6d', '\x6e', '\x6f',
+ '\x70', '\x71', '\x72', '\x73', '\x74', '\x75', '\x76', '\x77',
+ '\x78', '\x79', '\x7a', '\x7b', '\x7c', '\x7d', '\x7e', '\x7f'
+};
+
+static const char uppertable[128] = { /* identity map, except 0x61-0x7a (a-z) -> A-Z */
+ '\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07',
+ '\x08', '\x09', '\x0a', '\x0b', '\x0c', '\x0d', '\x0e', '\x0f',
+ '\x10', '\x11', '\x12', '\x13', '\x14', '\x15', '\x16', '\x17',
+ '\x18', '\x19', '\x1a', '\x1b', '\x1c', '\x1d', '\x1e', '\x1f',
+ '\x20', '\x21', '\x22', '\x23', '\x24', '\x25', '\x26', '\x27',
+ '\x28', '\x29', '\x2a', '\x2b', '\x2c', '\x2d', '\x2e', '\x2f',
+ '\x30', '\x31', '\x32', '\x33', '\x34', '\x35', '\x36', '\x37',
+ '\x38', '\x39', '\x3a', '\x3b', '\x3c', '\x3d', '\x3e', '\x3f',
+ '\x40', '\x41', '\x42', '\x43', '\x44', '\x45', '\x46', '\x47',
+ '\x48', '\x49', '\x4a', '\x4b', '\x4c', '\x4d', '\x4e', '\x4f',
+ '\x50', '\x51', '\x52', '\x53', '\x54', '\x55', '\x56', '\x57',
+ '\x58', '\x59', '\x5a', '\x5b', '\x5c', '\x5d', '\x5e', '\x5f',
+ '\x60',
+ '\x41', '\x42', '\x43', '\x44', '\x45', '\x46', '\x47', /* a-g */
+ '\x48', '\x49', '\x4a', '\x4b', '\x4c', '\x4d', '\x4e', '\x4f', /* h-o */
+ '\x50', '\x51', '\x52', '\x53', '\x54', '\x55', '\x56', '\x57', /* p-w */
+ '\x58', '\x59', '\x5a', /* x-z */
+ '\x7b', '\x7c', '\x7d', '\x7e', '\x7f'
+};
+
+/*
+ * Turn a hex-encoded string into binary.
+ */
+PyObject *unhexlify(const char *str, int len) /* returns new bytes of len/2; len assumed even */
+{
+ PyObject *ret;
+ char *d;
+ int i;
+
+ ret = PyBytes_FromStringAndSize(NULL, len / 2);
+
+ if (!ret)
+ return NULL;
+
+ d = PyBytes_AsString(ret);
+
+ for (i = 0; i < len;) {
+ int hi = hexdigit(str, i++); /* hexdigit() comes from util.h */
+ int lo = hexdigit(str, i++); /* NOTE(review): odd len reads one digit past the pairs -- verify callers */
+ *d++ = (hi << 4) | lo;
+ }
+
+ return ret;
+}
+
+static inline PyObject *_asciitransform(PyObject *str_obj, /* bytes to transform */
+ const char table[128], /* per-byte mapping for 7-bit input */
+ PyObject *fallback_fn) /* called as fallback_fn(str_obj) on non-ASCII; NULL => raise */
+{
+ char *str, *newstr;
+ Py_ssize_t i, len;
+ PyObject *newobj = NULL;
+ PyObject *ret = NULL;
+
+ str = PyBytes_AS_STRING(str_obj);
+ len = PyBytes_GET_SIZE(str_obj);
+
+ newobj = PyBytes_FromStringAndSize(NULL, len);
+ if (!newobj)
+ goto quit;
+
+ newstr = PyBytes_AS_STRING(newobj);
+
+ for (i = 0; i < len; i++) {
+ char c = str[i];
+ if (c & 0x80) { /* non-ASCII byte: delegate or raise */
+ if (fallback_fn != NULL) {
+ ret = PyObject_CallFunctionObjArgs(fallback_fn,
+ str_obj, NULL); /* fallback result becomes the return value */
+ } else {
+ PyObject *err = PyUnicodeDecodeError_Create(
+ "ascii", str, len, i, (i + 1),
+ "unexpected code byte");
+ PyErr_SetObject(PyExc_UnicodeDecodeError, err);
+ Py_XDECREF(err);
+ }
+ goto quit;
+ }
+ newstr[i] = table[(unsigned char)c];
+ }
+
+ ret = newobj;
+ Py_INCREF(ret); /* balances the unconditional decref below */
+quit:
+ Py_XDECREF(newobj);
+ return ret;
+}
+
+static PyObject *asciilower(PyObject *self, PyObject *args) /* lowercase ASCII bytes; UnicodeDecodeError on 8-bit input */
+{
+ PyObject *str_obj;
+ if (!PyArg_ParseTuple(args, "O!:asciilower", &PyBytes_Type, &str_obj))
+ return NULL;
+ return _asciitransform(str_obj, lowertable, NULL);
+}
+
+static PyObject *asciiupper(PyObject *self, PyObject *args) /* uppercase ASCII bytes; UnicodeDecodeError on 8-bit input */
+{
+ PyObject *str_obj;
+ if (!PyArg_ParseTuple(args, "O!:asciiupper", &PyBytes_Type, &str_obj))
+ return NULL;
+ return _asciitransform(str_obj, uppertable, NULL);
+}
+
+static inline PyObject *_dict_new_presized(Py_ssize_t expected_size) /* dict sized to avoid resizes up to expected_size */
+{
+ /* _PyDict_NewPresized expects a minused parameter, but it actually
+ creates a dictionary that's the nearest power of two bigger than the
+ parameter. For example, with the initial minused = 1000, the
+ dictionary created has size 1024. Of course in a lot of cases that
+ can be greater than the maximum load factor Python's dict object
+ expects (= 2/3), so as soon as we cross the threshold we'll resize
+ anyway. So create a dictionary that's at least 3/2 the size. */
+ return _PyDict_NewPresized(((1 + expected_size) / 2) * 3);
+}
+
+static PyObject *dict_new_presized(PyObject *self, PyObject *args) /* Python-visible wrapper for the above */
+{
+ Py_ssize_t expected_size;
+
+ if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size))
+ return NULL;
+
+ return _dict_new_presized(expected_size);
+}
+
+static PyObject *make_file_foldmap(PyObject *self, PyObject *args) /* build dict: normcase(filename) -> filename */
+{
+ PyObject *dmap, *spec_obj, *normcase_fallback;
+ PyObject *file_foldmap = NULL;
+ enum normcase_spec spec;
+ PyObject *k, *v;
+ dirstateTupleObject *tuple;
+ Py_ssize_t pos = 0;
+ const char *table;
+
+ if (!PyArg_ParseTuple(args, "O!O!O!:make_file_foldmap",
+ &PyDict_Type, &dmap,
+ &PyInt_Type, &spec_obj,
+ &PyFunction_Type, &normcase_fallback))
+ goto quit;
+
+ spec = (int)PyInt_AS_LONG(spec_obj);
+ switch (spec) { /* pick fast ASCII table, or NULL => always use Python fallback */
+ case NORMCASE_LOWER:
+ table = lowertable;
+ break;
+ case NORMCASE_UPPER:
+ table = uppertable;
+ break;
+ case NORMCASE_OTHER:
+ table = NULL;
+ break;
+ default:
+ PyErr_SetString(PyExc_TypeError, "invalid normcasespec");
+ goto quit;
+ }
+
+ /* Add some more entries to deal with additions outside this
+ function. */
+ file_foldmap = _dict_new_presized((PyDict_Size(dmap) / 10) * 11); /* 10% headroom */
+ if (file_foldmap == NULL)
+ goto quit;
+
+ while (PyDict_Next(dmap, &pos, &k, &v)) {
+ if (!dirstate_tuple_check(v)) {
+ PyErr_SetString(PyExc_TypeError,
+ "expected a dirstate tuple");
+ goto quit;
+ }
+
+ tuple = (dirstateTupleObject *)v;
+ if (tuple->state != 'r') { /* skip removed entries */
+ PyObject *normed;
+ if (table != NULL) {
+ normed = _asciitransform(k, table,
+ normcase_fallback); /* fallback handles non-ASCII names */
+ } else {
+ normed = PyObject_CallFunctionObjArgs(
+ normcase_fallback, k, NULL);
+ }
+
+ if (normed == NULL)
+ goto quit;
+ if (PyDict_SetItem(file_foldmap, normed, k) == -1) {
+ Py_DECREF(normed);
+ goto quit;
+ }
+ Py_DECREF(normed);
+ }
+ }
+ return file_foldmap;
+quit:
+ Py_XDECREF(file_foldmap);
+ return NULL;
+}
+
+/*
+ * This code assumes that a manifest is stitched together with newline
+ * ('\n') characters.
+ */
+static PyObject *parse_manifest(PyObject *self, PyObject *args) /* fill mfdict[file]=node, fdict[file]=flags */
+{
+ PyObject *mfdict, *fdict;
+ char *str, *start, *end;
+ int len;
+
+ if (!PyArg_ParseTuple(args, "O!O!s#:parse_manifest",
+ &PyDict_Type, &mfdict,
+ &PyDict_Type, &fdict,
+ &str, &len))
+ goto quit;
+
+ start = str;
+ end = str + len;
+ while (start < end) { /* each entry: "<file>\0<40 hex chars>[flags]\n" */
+ PyObject *file = NULL, *node = NULL;
+ PyObject *flags = NULL;
+ char *zero = NULL, *newline = NULL;
+ ptrdiff_t nlen;
+
+ zero = memchr(start, '\0', end - start);
+ if (!zero) {
+ PyErr_SetString(PyExc_ValueError,
+ "manifest entry has no separator");
+ goto quit;
+ }
+
+ newline = memchr(zero + 1, '\n', end - (zero + 1));
+ if (!newline) {
+ PyErr_SetString(PyExc_ValueError,
+ "manifest contains trailing garbage");
+ goto quit;
+ }
+
+ file = PyBytes_FromStringAndSize(start, zero - start);
+
+ if (!file)
+ goto bail;
+
+ nlen = newline - zero - 1; /* hex node length, plus any flags */
+
+ node = unhexlify(zero + 1, nlen > 40 ? 40 : (int)nlen); /* at most 40 hex -> 20-byte node */
+ if (!node)
+ goto bail;
+
+ if (nlen > 40) { /* anything past the node is flags */
+ flags = PyBytes_FromStringAndSize(zero + 41,
+ nlen - 40);
+ if (!flags)
+ goto bail;
+
+ if (PyDict_SetItem(fdict, file, flags) == -1)
+ goto bail;
+ }
+
+ if (PyDict_SetItem(mfdict, file, node) == -1)
+ goto bail;
+
+ start = newline + 1;
+
+ Py_XDECREF(flags);
+ Py_XDECREF(node);
+ Py_XDECREF(file);
+ continue;
+ bail:
+ Py_XDECREF(flags);
+ Py_XDECREF(node);
+ Py_XDECREF(file);
+ goto quit;
+ }
+
+ Py_INCREF(Py_None);
+ return Py_None;
+quit:
+ return NULL;
+}
+
+static inline dirstateTupleObject *make_dirstate_tuple(char state, int mode, /* C-level constructor, bypasses tp_new */
+ int size, int mtime)
+{
+ dirstateTupleObject *t = PyObject_New(dirstateTupleObject,
+ &dirstateTupleType);
+ if (!t)
+ return NULL;
+ t->state = state;
+ t->mode = mode;
+ t->size = size;
+ t->mtime = mtime;
+ return t;
+}
+
+static PyObject *dirstate_tuple_new(PyTypeObject *subtype, PyObject *args,
+ PyObject *kwds) /* Python-level constructor: dirstatetuple(state, mode, size, mtime) */
+{
+ /* We do all the initialization here and not a tp_init function because
+ * dirstate_tuple is immutable. */
+ dirstateTupleObject *t;
+ char state;
+ int size, mode, mtime;
+ if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime))
+ return NULL;
+
+ t = (dirstateTupleObject *)subtype->tp_alloc(subtype, 1);
+ if (!t)
+ return NULL;
+ t->state = state;
+ t->mode = mode;
+ t->size = size;
+ t->mtime = mtime;
+
+ return (PyObject *)t;
+}
+
+static void dirstate_tuple_dealloc(PyObject *o)
+{
+ PyObject_Del(o);
+}
+
+static Py_ssize_t dirstate_tuple_length(PyObject *o)
+{
+ return 4; /* always (state, mode, size, mtime) */
+}
+
+static PyObject *dirstate_tuple_item(PyObject *o, Py_ssize_t i) /* tuple-style indexing into the four fields */
+{
+ dirstateTupleObject *t = (dirstateTupleObject *)o;
+ switch (i) {
+ case 0:
+ return PyBytes_FromStringAndSize(&t->state, 1); /* state as a 1-byte string */
+ case 1:
+ return PyInt_FromLong(t->mode);
+ case 2:
+ return PyInt_FromLong(t->size);
+ case 3:
+ return PyInt_FromLong(t->mtime);
+ default:
+ PyErr_SetString(PyExc_IndexError, "index out of range");
+ return NULL;
+ }
+}
+
+static PySequenceMethods dirstate_tuple_sq = { /* minimal sequence protocol: len() and [] only */
+ dirstate_tuple_length, /* sq_length */
+ 0, /* sq_concat */
+ 0, /* sq_repeat */
+ dirstate_tuple_item, /* sq_item */
+ 0, /* sq_ass_item */
+ 0, /* sq_contains */
+ 0, /* sq_inplace_concat */
+ 0 /* sq_inplace_repeat */
+};
+
+PyTypeObject dirstateTupleType = { /* immutable 4-item tuple-like type for dirstate entries */
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "dirstate_tuple", /* tp_name */
+ sizeof(dirstateTupleObject),/* tp_basicsize */
+ 0, /* tp_itemsize */
+ (destructor)dirstate_tuple_dealloc, /* tp_dealloc */
+ 0, /* tp_print */
+ 0, /* tp_getattr */
+ 0, /* tp_setattr */
+ 0, /* tp_compare */
+ 0, /* tp_repr */
+ 0, /* tp_as_number */
+ &dirstate_tuple_sq, /* tp_as_sequence */
+ 0, /* tp_as_mapping */
+ 0, /* tp_hash */
+ 0, /* tp_call */
+ 0, /* tp_str */
+ 0, /* tp_getattro */
+ 0, /* tp_setattro */
+ 0, /* tp_as_buffer */
+ Py_TPFLAGS_DEFAULT, /* tp_flags */
+ "dirstate tuple", /* tp_doc */
+ 0, /* tp_traverse */
+ 0, /* tp_clear */
+ 0, /* tp_richcompare */
+ 0, /* tp_weaklistoffset */
+ 0, /* tp_iter */
+ 0, /* tp_iternext */
+ 0, /* tp_methods */
+ 0, /* tp_members */
+ 0, /* tp_getset */
+ 0, /* tp_base */
+ 0, /* tp_dict */
+ 0, /* tp_descr_get */
+ 0, /* tp_descr_set */
+ 0, /* tp_dictoffset */
+ 0, /* tp_init */
+ 0, /* tp_alloc */
+ dirstate_tuple_new, /* tp_new */
+};
+
+static PyObject *parse_dirstate(PyObject *self, PyObject *args) /* decode dirstate bytes -> (p1, p2); fills dmap, cmap */
+{
+ PyObject *dmap, *cmap, *parents = NULL, *ret = NULL;
+ PyObject *fname = NULL, *cname = NULL, *entry = NULL;
+ char state, *cur, *str, *cpos;
+ int mode, size, mtime;
+ unsigned int flen, len, pos = 40; /* start past the two 20-byte parent hashes */
+ int readlen;
+
+ if (!PyArg_ParseTuple(args, "O!O!s#:parse_dirstate",
+ &PyDict_Type, &dmap,
+ &PyDict_Type, &cmap,
+ &str, &readlen))
+ goto quit;
+
+ len = readlen;
+
+ /* read parents */
+ if (len < 40) {
+ PyErr_SetString(
+ PyExc_ValueError, "too little data for parents");
+ goto quit;
+ }
+
+ parents = Py_BuildValue("s#s#", str, 20, str + 20, 20); /* (p1, p2) */
+ if (!parents)
+ goto quit;
+
+ /* read filenames */
+ while (pos >= 40 && pos < len) {
+ if (pos + 17 > len) { /* need the full fixed-size record header */
+ PyErr_SetString(PyExc_ValueError,
+ "overflow in dirstate");
+ goto quit;
+ }
+ cur = str + pos;
+ /* unpack header: state(1) mode(4) size(4) mtime(4) flen(4), big-endian */
+ state = *cur;
+ mode = getbe32(cur + 1);
+ size = getbe32(cur + 5);
+ mtime = getbe32(cur + 9);
+ flen = getbe32(cur + 13);
+ pos += 17;
+ cur += 17;
+ if (flen > len - pos) {
+ PyErr_SetString(PyExc_ValueError, "overflow in dirstate");
+ goto quit;
+ }
+
+ entry = (PyObject *)make_dirstate_tuple(state, mode, size,
+ mtime); /* NOTE(review): entry is not NULL-checked before use below -- verify */
+ cpos = memchr(cur, 0, flen);
+ if (cpos) { /* name field is "filename\0copysource" */
+ fname = PyBytes_FromStringAndSize(cur, cpos - cur);
+ cname = PyBytes_FromStringAndSize(cpos + 1,
+ flen - (cpos - cur) - 1);
+ if (!fname || !cname ||
+ PyDict_SetItem(cmap, fname, cname) == -1 ||
+ PyDict_SetItem(dmap, fname, entry) == -1)
+ goto quit;
+ Py_DECREF(cname);
+ } else {
+ fname = PyBytes_FromStringAndSize(cur, flen);
+ if (!fname ||
+ PyDict_SetItem(dmap, fname, entry) == -1)
+ goto quit;
+ }
+ Py_DECREF(fname);
+ Py_DECREF(entry);
+ fname = cname = entry = NULL; /* so the quit path doesn't double-free */
+ pos += flen;
+ }
+
+ ret = parents;
+ Py_INCREF(ret); /* balances the unconditional decref below */
+quit:
+ Py_XDECREF(fname);
+ Py_XDECREF(cname);
+ Py_XDECREF(entry);
+ Py_XDECREF(parents);
+ return ret;
+}
+
+/*
+ * Build a set of non-normal and other parent entries from the dirstate dmap
+*/
+static PyObject *nonnormalotherparententries(PyObject *self, PyObject *args) { /* returns (nonnormal_set, otherparent_set) */
+ PyObject *dmap, *fname, *v;
+ PyObject *nonnset = NULL, *otherpset = NULL, *result = NULL;
+ Py_ssize_t pos;
+
+ if (!PyArg_ParseTuple(args, "O!:nonnormalentries",
+ &PyDict_Type, &dmap))
+ goto bail;
+
+ nonnset = PySet_New(NULL);
+ if (nonnset == NULL)
+ goto bail;
+
+ otherpset = PySet_New(NULL);
+ if (otherpset == NULL)
+ goto bail;
+
+ pos = 0;
+ while (PyDict_Next(dmap, &pos, &fname, &v)) {
+ dirstateTupleObject *t;
+ if (!dirstate_tuple_check(v)) {
+ PyErr_SetString(PyExc_TypeError,
+ "expected a dirstate tuple");
+ goto bail;
+ }
+ t = (dirstateTupleObject *)v;
+
+ if (t->state == 'n' && t->size == -2) { /* size -2 marks an other-parent entry */
+ if (PySet_Add(otherpset, fname) == -1) {
+ goto bail;
+ }
+ }
+
+ if (t->state == 'n' && t->mtime != -1)
+ continue; /* clean normal entry: in neither set */
+ if (PySet_Add(nonnset, fname) == -1)
+ goto bail;
+ }
+
+ result = Py_BuildValue("(OO)", nonnset, otherpset);
+ if (result == NULL)
+ goto bail;
+ Py_DECREF(nonnset);
+ Py_DECREF(otherpset);
+ return result;
+bail:
+ Py_XDECREF(nonnset);
+ Py_XDECREF(otherpset);
+ Py_XDECREF(result);
+ return NULL;
+}
+
+/*
+ * Efficiently pack a dirstate object into its on-disk format.
+ */
+static PyObject *pack_dirstate(PyObject *self, PyObject *args) /* inverse of parse_dirstate; returns bytes */
+{
+ PyObject *packobj = NULL;
+ PyObject *map, *copymap, *pl, *mtime_unset = NULL;
+ Py_ssize_t nbytes, pos, l;
+ PyObject *k, *v = NULL, *pn;
+ char *p, *s;
+ int now;
+
+ if (!PyArg_ParseTuple(args, "O!O!Oi:pack_dirstate",
+ &PyDict_Type, &map, &PyDict_Type, &copymap,
+ &pl, &now))
+ return NULL;
+
+ if (!PySequence_Check(pl) || PySequence_Size(pl) != 2) {
+ PyErr_SetString(PyExc_TypeError, "expected 2-element sequence");
+ return NULL;
+ }
+
+ /* Figure out how much we need to allocate. */
+ for (nbytes = 40, pos = 0; PyDict_Next(map, &pos, &k, &v);) { /* 40 = two parent hashes */
+ PyObject *c;
+ if (!PyBytes_Check(k)) {
+ PyErr_SetString(PyExc_TypeError, "expected string key");
+ goto bail;
+ }
+ nbytes += PyBytes_GET_SIZE(k) + 17; /* name + fixed record header */
+ c = PyDict_GetItem(copymap, k);
+ if (c) {
+ if (!PyBytes_Check(c)) {
+ PyErr_SetString(PyExc_TypeError,
+ "expected string key");
+ goto bail;
+ }
+ nbytes += PyBytes_GET_SIZE(c) + 1; /* "\0copysource" */
+ }
+ }
+
+ packobj = PyBytes_FromStringAndSize(NULL, nbytes);
+ if (packobj == NULL)
+ goto bail;
+
+ p = PyBytes_AS_STRING(packobj);
+
+ pn = PySequence_ITEM(pl, 0);
+ if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
+ PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
+ goto bail;
+ }
+ memcpy(p, s, l);
+ p += 20;
+ pn = PySequence_ITEM(pl, 1);
+ if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
+ PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
+ goto bail;
+ }
+ memcpy(p, s, l);
+ p += 20;
+
+ for (pos = 0; PyDict_Next(map, &pos, &k, &v); ) {
+ dirstateTupleObject *tuple;
+ char state;
+ int mode, size, mtime;
+ Py_ssize_t len, l;
+ PyObject *o;
+ char *t;
+
+ if (!dirstate_tuple_check(v)) {
+ PyErr_SetString(PyExc_TypeError,
+ "expected a dirstate tuple");
+ goto bail;
+ }
+ tuple = (dirstateTupleObject *)v;
+
+ state = tuple->state;
+ mode = tuple->mode;
+ size = tuple->size;
+ mtime = tuple->mtime;
+ if (state == 'n' && mtime == now) {
+ /* See pure/parsers.py:pack_dirstate for why we do
+ * this. */
+ mtime = -1;
+ mtime_unset = (PyObject *)make_dirstate_tuple(
+ state, mode, size, mtime);
+ if (!mtime_unset)
+ goto bail;
+ if (PyDict_SetItem(map, k, mtime_unset) == -1)
+ goto bail;
+ Py_DECREF(mtime_unset);
+ mtime_unset = NULL;
+ }
+ *p++ = state;
+ putbe32((uint32_t)mode, p);
+ putbe32((uint32_t)size, p + 4);
+ putbe32((uint32_t)mtime, p + 8);
+ t = p + 12; /* remember where the name-length field goes */
+ p += 16;
+ len = PyBytes_GET_SIZE(k);
+ memcpy(p, PyBytes_AS_STRING(k), len);
+ p += len;
+ o = PyDict_GetItem(copymap, k);
+ if (o) { /* append "\0copysource" and count it in len */
+ *p++ = '\0';
+ l = PyBytes_GET_SIZE(o);
+ memcpy(p, PyBytes_AS_STRING(o), l);
+ p += l;
+ len += l + 1;
+ }
+ putbe32((uint32_t)len, t); /* backfill the length field */
+ }
+
+ pos = p - PyBytes_AS_STRING(packobj);
+ if (pos != nbytes) { /* sanity check against the sizing pass */
+ PyErr_Format(PyExc_SystemError, "bad dirstate size: %ld != %ld",
+ (long)pos, (long)nbytes);
+ goto bail;
+ }
+
+ return packobj;
+bail:
+ Py_XDECREF(mtime_unset);
+ Py_XDECREF(packobj);
+ Py_XDECREF(v); /* NOTE(review): v is a borrowed ref from PyDict_Next; this decref looks unbalanced -- verify */
+ return NULL;
+}
+
+#define BUMPED_FIX 1 /* obsmarker flag bits */
+#define USING_SHA_256 2
+#define FM1_HEADER_SIZE (4 + 8 + 2 + 2 + 1 + 1 + 1) /* size + mtime + tz + flags + 3 counts */
+
+static PyObject *readshas(
+ const char *source, unsigned char num, Py_ssize_t hashwidth) /* tuple of num hashes, hashwidth bytes each */
+{
+ int i;
+ PyObject *list = PyTuple_New(num);
+ if (list == NULL) {
+ return NULL;
+ }
+ for (i = 0; i < num; i++) {
+ PyObject *hash = PyBytes_FromStringAndSize(source, hashwidth);
+ if (hash == NULL) {
+ Py_DECREF(list);
+ return NULL;
+ }
+ PyTuple_SET_ITEM(list, i, hash); /* steals the reference to hash */
+ source += hashwidth;
+ }
+ return list;
+}
+
+static PyObject *fm1readmarker(const char *databegin, const char *dataend, /* parse one v1 obsmarker */
+ uint32_t *msize) /* out: total encoded size of this marker */
+{
+ const char *data = databegin;
+ const char *meta;
+
+ double mtime;
+ int16_t tz;
+ uint16_t flags;
+ unsigned char nsuccs, nparents, nmetadata;
+ Py_ssize_t hashwidth = 20; /* SHA-1 unless USING_SHA_256 flag is set */
+
+ PyObject *prec = NULL, *parents = NULL, *succs = NULL;
+ PyObject *metadata = NULL, *ret = NULL;
+ int i;
+
+ if (data + FM1_HEADER_SIZE > dataend) {
+ goto overflow;
+ }
+
+ *msize = getbe32(data);
+ data += 4;
+ mtime = getbefloat64(data);
+ data += 8;
+ tz = getbeint16(data);
+ data += 2;
+ flags = getbeuint16(data);
+ data += 2;
+
+ if (flags & USING_SHA_256) {
+ hashwidth = 32;
+ }
+
+ nsuccs = (unsigned char)(*data++);
+ nparents = (unsigned char)(*data++);
+ nmetadata = (unsigned char)(*data++);
+
+ if (databegin + *msize > dataend) {
+ goto overflow;
+ }
+ dataend = databegin + *msize; /* narrow down to marker size */
+
+ if (data + hashwidth > dataend) {
+ goto overflow;
+ }
+ prec = PyBytes_FromStringAndSize(data, hashwidth); /* precursor node */
+ data += hashwidth;
+ if (prec == NULL) {
+ goto bail;
+ }
+
+ if (data + nsuccs * hashwidth > dataend) {
+ goto overflow;
+ }
+ succs = readshas(data, nsuccs, hashwidth);
+ if (succs == NULL) {
+ goto bail;
+ }
+ data += nsuccs * hashwidth;
+
+ if (nparents == 1 || nparents == 2) {
+ if (data + nparents * hashwidth > dataend) {
+ goto overflow;
+ }
+ parents = readshas(data, nparents, hashwidth);
+ if (parents == NULL) {
+ goto bail;
+ }
+ data += nparents * hashwidth;
+ } else {
+ parents = Py_None; /* parents unknown/not recorded */
+ Py_INCREF(parents);
+ }
+
+ if (data + 2 * nmetadata > dataend) {
+ goto overflow;
+ }
+ meta = data + (2 * nmetadata); /* sizes first, then the key/value bytes */
+ metadata = PyTuple_New(nmetadata);
+ if (metadata == NULL) {
+ goto bail;
+ }
+ for (i = 0; i < nmetadata; i++) {
+ PyObject *tmp, *left = NULL, *right = NULL;
+ Py_ssize_t leftsize = (unsigned char)(*data++);
+ Py_ssize_t rightsize = (unsigned char)(*data++);
+ if (meta + leftsize + rightsize > dataend) {
+ goto overflow;
+ }
+ left = PyBytes_FromStringAndSize(meta, leftsize);
+ meta += leftsize;
+ right = PyBytes_FromStringAndSize(meta, rightsize);
+ meta += rightsize;
+ tmp = PyTuple_New(2);
+ if (!left || !right || !tmp) {
+ Py_XDECREF(left);
+ Py_XDECREF(right);
+ Py_XDECREF(tmp);
+ goto bail;
+ }
+ PyTuple_SET_ITEM(tmp, 0, left); /* SET_ITEM steals both references */
+ PyTuple_SET_ITEM(tmp, 1, right);
+ PyTuple_SET_ITEM(metadata, i, tmp);
+ }
+ ret = Py_BuildValue("(OOHO(di)O)", prec, succs, flags,
+ metadata, mtime, (int)tz * 60, parents); /* tz stored in minutes-> seconds? no: tz*60 = seconds offset -- see callers */
+ goto bail; /* return successfully */
+
+overflow:
+ PyErr_SetString(PyExc_ValueError, "overflow in obsstore");
+bail:
+ Py_XDECREF(prec);
+ Py_XDECREF(succs);
+ Py_XDECREF(metadata);
+ Py_XDECREF(parents);
+ return ret;
+}
+
+
+static PyObject *fm1readmarkers(PyObject *self, PyObject *args) { /* parse all markers in data[offset:stop] into a list */
+ const char *data, *dataend;
+ int datalen;
+ Py_ssize_t offset, stop;
+ PyObject *markers = NULL;
+
+ if (!PyArg_ParseTuple(args, "s#nn", &data, &datalen, &offset, &stop)) {
+ return NULL;
+ }
+ dataend = data + datalen;
+ data += offset;
+ markers = PyList_New(0);
+ if (!markers) {
+ return NULL;
+ }
+ while (offset < stop) {
+ uint32_t msize;
+ int error;
+ PyObject *record = fm1readmarker(data, dataend, &msize);
+ if (!record) {
+ goto bail;
+ }
+ error = PyList_Append(markers, record);
+ Py_DECREF(record);
+ if (error) {
+ goto bail;
+ }
+ data += msize; /* advance by the size fm1readmarker reported */
+ offset += msize;
+ }
+ return markers;
+bail:
+ Py_DECREF(markers);
+ return NULL;
+}
+
+static char parsers_doc[] = "Efficient content parsing.";
+
+PyObject *encodedir(PyObject *self, PyObject *args); /* these four are defined in other cext sources (pathencode.c etc.) */
+PyObject *pathencode(PyObject *self, PyObject *args);
+PyObject *lowerencode(PyObject *self, PyObject *args);
+PyObject *parse_index2(PyObject *self, PyObject *args);
+
+static PyMethodDef methods[] = {
+ {"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"},
+ {"nonnormalotherparententries", nonnormalotherparententries, METH_VARARGS,
+ "create a set containing non-normal and other parent entries of given "
+ "dirstate\n"},
+ {"parse_manifest", parse_manifest, METH_VARARGS, "parse a manifest\n"},
+ {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
+ {"parse_index2", parse_index2, METH_VARARGS, "parse a revlog index\n"},
+ {"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"},
+ {"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"},
+ {"dict_new_presized", dict_new_presized, METH_VARARGS,
+ "construct a dict with an expected size\n"},
+ {"make_file_foldmap", make_file_foldmap, METH_VARARGS,
+ "make file foldmap\n"},
+ {"encodedir", encodedir, METH_VARARGS, "encodedir a path\n"},
+ {"pathencode", pathencode, METH_VARARGS, "fncache-encode a path\n"},
+ {"lowerencode", lowerencode, METH_VARARGS, "lower-encode a path\n"},
+ {"fm1readmarkers", fm1readmarkers, METH_VARARGS,
+ "parse v1 obsolete markers\n"},
+ {NULL, NULL} /* sentinel */
+};
+
+void dirs_module_init(PyObject *mod); /* sub-initializers defined in other cext sources */
+void manifest_module_init(PyObject *mod);
+void revlog_module_init(PyObject *mod);
+
+static const int version = 1; /* exposed as parsers.version */
+
+static void module_init(PyObject *mod) /* shared py2/py3 initialization */
+{
+ PyModule_AddIntConstant(mod, "version", version);
+
+ /* This module constant has two purposes. First, it lets us unit test
+ * the ImportError raised without hard-coding any error text. This
+ * means we can change the text in the future without breaking tests,
+ * even across changesets without a recompile. Second, its presence
+ * can be used to determine whether the version-checking logic is
+ * present, which also helps in testing across changesets without a
+ * recompile. Note that this means the pure-Python version of parsers
+ * should not have this module constant. */
+ PyModule_AddStringConstant(mod, "versionerrortext", versionerrortext);
+
+ dirs_module_init(mod);
+ manifest_module_init(mod);
+ revlog_module_init(mod);
+
+ if (PyType_Ready(&dirstateTupleType) < 0)
+ return; /* leaves dirstatetuple unregistered; exception is pending */
+ Py_INCREF(&dirstateTupleType);
+ PyModule_AddObject(mod, "dirstatetuple",
+ (PyObject *)&dirstateTupleType);
+}
+
+static int check_python_version(void) /* -1 + ImportError if runtime major/minor differs from compile time */
+{
+ PyObject *sys = PyImport_ImportModule("sys"), *ver;
+ long hexversion;
+ if (!sys)
+ return -1;
+ ver = PyObject_GetAttrString(sys, "hexversion");
+ Py_DECREF(sys);
+ if (!ver)
+ return -1;
+ hexversion = PyInt_AsLong(ver);
+ Py_DECREF(ver);
+ /* sys.hexversion is a 32-bit number by default, so the -1 case
+ * should only occur in unusual circumstances (e.g. if sys.hexversion
+ * is manually set to an invalid value). */
+ if ((hexversion == -1) || (hexversion >> 16 != PY_VERSION_HEX >> 16)) { /* >> 16 keeps only major+minor */
+ PyErr_Format(PyExc_ImportError, "%s: The Mercurial extension "
+ "modules were compiled with Python " PY_VERSION ", but "
+ "Mercurial is currently using Python with sys.hexversion=%ld: "
+ "Python %s\n at: %s", versionerrortext, hexversion,
+ Py_GetVersion(), Py_GetProgramFullPath());
+ return -1;
+ }
+ return 0;
+}
+
+#ifdef IS_PY3K
+static struct PyModuleDef parsers_module = {
+ PyModuleDef_HEAD_INIT,
+ "parsers",
+ parsers_doc,
+ -1, /* no per-module state */
+ methods
+};
+
+PyMODINIT_FUNC PyInit_parsers(void) /* Python 3 module entry point */
+{
+ PyObject *mod;
+
+ if (check_python_version() == -1)
+ return NULL;
+ mod = PyModule_Create(&parsers_module); /* NOTE(review): mod not NULL-checked before module_init -- verify */
+ module_init(mod);
+ return mod;
+}
+#else
+PyMODINIT_FUNC initparsers(void) /* Python 2 module entry point */
+{
+ PyObject *mod;
+
+ if (check_python_version() == -1)
+ return;
+ mod = Py_InitModule3("parsers", methods, parsers_doc);
+ module_init(mod);
+}
+#endif
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/pathencode.c Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,765 @@
+/*
+ pathencode.c - efficient path name encoding
+
+ Copyright 2012 Facebook
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+*/
+
+/*
+ * An implementation of the name encoding scheme used by the fncache
+ * store. The common case is of a path < 120 bytes long, which is
+ * handled either in a single pass with no allocations or two passes
+ * with a single allocation. For longer paths, multiple passes are
+ * required.
+ */
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include <assert.h>
+#include <ctype.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "util.h"
+
+/* state machine for the fast path */
+enum path_state {
+ START, /* first byte of a path component */
+ A, /* "AUX" */
+ AU,
+ THIRD, /* third of a 3-byte sequence, e.g. "AUX", "NUL" */
+ C, /* "CON" or "COMn" */
+ CO,
+ COMLPT, /* "COM" or "LPT" */
+ COMLPTn,
+ L,
+ LP,
+ N,
+ NU,
+ P, /* "PRN" */
+ PR,
+ LDOT, /* leading '.' */
+ DOT, /* '.' in a non-leading position */
+ H, /* ".h" */
+ HGDI, /* ".hg", ".d", or ".i" */
+ SPACE,
+ DEFAULT /* byte of a path component after the first */
+};
+
+/* state machine for dir-encoding */
+enum dir_state {
+ DDOT,
+ DH,
+ DHGDI,
+ DDEFAULT
+};
+
+static inline int inset(const uint32_t bitset[], char c) /* test bit c in a 256-bit set of 8 uint32 words */
+{
+ return bitset[((uint8_t)c) >> 5] & (1 << (((uint8_t)c) & 31));
+}
+
+static inline void charcopy(char *dest, Py_ssize_t *destlen, size_t destsize,
+ char c) /* append one byte; dest==NULL means measuring pass (count only) */
+{
+ if (dest) {
+ assert(*destlen < destsize);
+ dest[*destlen] = c;
+ }
+ (*destlen)++;
+}
+
+static inline void memcopy(char *dest, Py_ssize_t *destlen, size_t destsize,
+ const void *src, Py_ssize_t len) /* append len bytes; dest==NULL means measuring pass */
+{
+ if (dest) {
+ assert(*destlen + len < destsize);
+ memcpy((void *)&dest[*destlen], src, len);
+ }
+ *destlen += len;
+}
+
+static inline void hexencode(char *dest, Py_ssize_t *destlen, size_t destsize,
+ uint8_t c) /* append two lowercase hex digits for c */
+{
+ static const char hexdigit[] = "0123456789abcdef";
+
+ charcopy(dest, destlen, destsize, hexdigit[c >> 4]);
+ charcopy(dest, destlen, destsize, hexdigit[c & 15]);
+}
+
+/* 3-byte escape: tilde followed by two hex digits */
+static inline void escape3(char *dest, Py_ssize_t *destlen, size_t destsize,
+ char c)
+{
+ charcopy(dest, destlen, destsize, '~');
+ hexencode(dest, destlen, destsize, c);
+}
+
+static Py_ssize_t _encodedir(char *dest, size_t destsize, /* dest==NULL: just measure; returns output length */
+ const char *src, Py_ssize_t len) /* inserts ".hg" after path components ending in ".hg", ".d" or ".i" */
+{
+ enum dir_state state = DDEFAULT;
+ Py_ssize_t i = 0, destlen = 0;
+
+ while (i < len) {
+ switch (state) {
+ case DDOT: /* just copied a '.' */
+ switch (src[i]) {
+ case 'd':
+ case 'i':
+ state = DHGDI;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ case 'h':
+ state = DH;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ default:
+ state = DDEFAULT;
+ break;
+ }
+ break;
+ case DH: /* just copied ".h" */
+ if (src[i] == 'g') {
+ state = DHGDI;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ }
+ else state = DDEFAULT;
+ break;
+ case DHGDI: /* component ends in ".hg", ".d" or ".i" */
+ if (src[i] == '/') {
+ memcopy(dest, &destlen, destsize, ".hg", 3); /* the actual escaping */
+ charcopy(dest, &destlen, destsize, src[i++]);
+ }
+ state = DDEFAULT;
+ break;
+ case DDEFAULT:
+ if (src[i] == '.')
+ state = DDOT;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ }
+ }
+
+ return destlen;
+}
+
+PyObject *encodedir(PyObject *self, PyObject *args) /* Python wrapper: dir-encode a bytes path */
+{
+ Py_ssize_t len, newlen;
+ PyObject *pathobj, *newobj;
+ char *path;
+
+ if (!PyArg_ParseTuple(args, "O:encodedir", &pathobj))
+ return NULL;
+
+ if (PyBytes_AsStringAndSize(pathobj, &path, &len) == -1) {
+ PyErr_SetString(PyExc_TypeError, "expected a string");
+ return NULL;
+ }
+
+ newlen = len ? _encodedir(NULL, 0, path, len + 1) : 1; /* measuring pass; +1 covers the trailing NUL */
+
+ if (newlen == len + 1) { /* nothing needed escaping: return the input unchanged */
+ Py_INCREF(pathobj);
+ return pathobj;
+ }
+
+ newobj = PyBytes_FromStringAndSize(NULL, newlen);
+
+ if (newobj) {
+ assert(PyBytes_Check(newobj));
+ Py_SIZE(newobj)--; /* drop the counted NUL; NOTE(review): relies on CPython bytes internals */
+ _encodedir(PyBytes_AS_STRING(newobj), newlen, path,
+ len + 1);
+ }
+
+ return newobj;
+}
+
+static Py_ssize_t _encode(const uint32_t twobytes[8], const uint32_t onebyte[8],
+ char *dest, Py_ssize_t destlen, size_t destsize,
+ const char *src, Py_ssize_t len,
+ int encodedir)
+{
+ enum path_state state = START;
+ Py_ssize_t i = 0;
+
+ /*
+ * Python strings end with a zero byte, which we use as a
+ * terminal token as they are not valid inside path names.
+ */
+
+ while (i < len) {
+ switch (state) {
+ case START:
+ switch (src[i]) {
+ case '/':
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ case '.':
+ state = LDOT;
+ escape3(dest, &destlen, destsize, src[i++]);
+ break;
+ case ' ':
+ state = DEFAULT;
+ escape3(dest, &destlen, destsize, src[i++]);
+ break;
+ case 'a':
+ state = A;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ case 'c':
+ state = C;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ case 'l':
+ state = L;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ case 'n':
+ state = N;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ case 'p':
+ state = P;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ default:
+ state = DEFAULT;
+ break;
+ }
+ break;
+ case A:
+ if (src[i] == 'u') {
+ state = AU;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ }
+ else state = DEFAULT;
+ break;
+ case AU:
+ if (src[i] == 'x') {
+ state = THIRD;
+ i++;
+ }
+ else state = DEFAULT;
+ break;
+ case THIRD:
+ state = DEFAULT;
+ switch (src[i]) {
+ case '.':
+ case '/':
+ case '\0':
+ escape3(dest, &destlen, destsize, src[i - 1]);
+ break;
+ default:
+ i--;
+ break;
+ }
+ break;
+ case C:
+ if (src[i] == 'o') {
+ state = CO;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ }
+ else state = DEFAULT;
+ break;
+ case CO:
+ if (src[i] == 'm') {
+ state = COMLPT;
+ i++;
+ }
+ else if (src[i] == 'n') {
+ state = THIRD;
+ i++;
+ }
+ else state = DEFAULT;
+ break;
+ case COMLPT:
+ switch (src[i]) {
+ case '1': case '2': case '3': case '4': case '5':
+ case '6': case '7': case '8': case '9':
+ state = COMLPTn;
+ i++;
+ break;
+ default:
+ state = DEFAULT;
+ charcopy(dest, &destlen, destsize, src[i - 1]);
+ break;
+ }
+ break;
+ case COMLPTn:
+ state = DEFAULT;
+ switch (src[i]) {
+ case '.':
+ case '/':
+ case '\0':
+ escape3(dest, &destlen, destsize, src[i - 2]);
+ charcopy(dest, &destlen, destsize, src[i - 1]);
+ break;
+ default:
+ memcopy(dest, &destlen, destsize,
+ &src[i - 2], 2);
+ break;
+ }
+ break;
+ case L:
+ if (src[i] == 'p') {
+ state = LP;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ }
+ else state = DEFAULT;
+ break;
+ case LP:
+ if (src[i] == 't') {
+ state = COMLPT;
+ i++;
+ }
+ else state = DEFAULT;
+ break;
+ case N:
+ if (src[i] == 'u') {
+ state = NU;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ }
+ else state = DEFAULT;
+ break;
+ case NU:
+ if (src[i] == 'l') {
+ state = THIRD;
+ i++;
+ }
+ else state = DEFAULT;
+ break;
+ case P:
+ if (src[i] == 'r') {
+ state = PR;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ }
+ else state = DEFAULT;
+ break;
+ case PR:
+ if (src[i] == 'n') {
+ state = THIRD;
+ i++;
+ }
+ else state = DEFAULT;
+ break;
+ case LDOT:
+ switch (src[i]) {
+ case 'd':
+ case 'i':
+ state = HGDI;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ case 'h':
+ state = H;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ default:
+ state = DEFAULT;
+ break;
+ }
+ break;
+ case DOT:
+ switch (src[i]) {
+ case '/':
+ case '\0':
+ state = START;
+ memcopy(dest, &destlen, destsize, "~2e", 3);
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ case 'd':
+ case 'i':
+ state = HGDI;
+ charcopy(dest, &destlen, destsize, '.');
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ case 'h':
+ state = H;
+ memcopy(dest, &destlen, destsize, ".h", 2);
+ i++;
+ break;
+ default:
+ state = DEFAULT;
+ charcopy(dest, &destlen, destsize, '.');
+ break;
+ }
+ break;
+ case H:
+ if (src[i] == 'g') {
+ state = HGDI;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ }
+ else state = DEFAULT;
+ break;
+ case HGDI:
+ if (src[i] == '/') {
+ state = START;
+ if (encodedir)
+ memcopy(dest, &destlen, destsize, ".hg",
+ 3);
+ charcopy(dest, &destlen, destsize, src[i++]);
+ }
+ else state = DEFAULT;
+ break;
+ case SPACE:
+ switch (src[i]) {
+ case '/':
+ case '\0':
+ state = START;
+ memcopy(dest, &destlen, destsize, "~20", 3);
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ default:
+ state = DEFAULT;
+ charcopy(dest, &destlen, destsize, ' ');
+ break;
+ }
+ break;
+ case DEFAULT:
+ while (inset(onebyte, src[i])) {
+ charcopy(dest, &destlen, destsize, src[i++]);
+ if (i == len)
+ goto done;
+ }
+ switch (src[i]) {
+ case '.':
+ state = DOT;
+ i++;
+ break;
+ case ' ':
+ state = SPACE;
+ i++;
+ break;
+ case '/':
+ state = START;
+ charcopy(dest, &destlen, destsize, '/');
+ i++;
+ break;
+ default:
+ if (inset(onebyte, src[i])) {
+ do {
+ charcopy(dest, &destlen,
+ destsize, src[i++]);
+ } while (i < len &&
+ inset(onebyte, src[i]));
+ }
+ else if (inset(twobytes, src[i])) {
+ char c = src[i++];
+ charcopy(dest, &destlen, destsize, '_');
+ charcopy(dest, &destlen, destsize,
+ c == '_' ? '_' : c + 32);
+ }
+ else
+ escape3(dest, &destlen, destsize,
+ src[i++]);
+ break;
+ }
+ break;
+ }
+ }
+done:
+ return destlen;
+}
+
+/*
+ * Basic store-path encoding: lowercase ASCII uppercase (and escape '_')
+ * via the twobytes set, pass safe bytes through via the onebyte set,
+ * and escape everything else.  Calls _encode with encodedir enabled.
+ * Returns the number of bytes written (or needed, when dest is NULL).
+ */
+static Py_ssize_t basicencode(char *dest, size_t destsize,
+                              const char *src, Py_ssize_t len)
+{
+    /* bitmap covering 'A'-'Z' and '_': rewritten as '_' + lowercase */
+    static const uint32_t twobytes[8] = { 0, 0, 0x87fffffe };
+
+    /* bitmap of bytes copied through unchanged */
+    static const uint32_t onebyte[8] = {
+        1, 0x2bff3bfa, 0x68000001, 0x2fffffff,
+    };
+
+    Py_ssize_t destlen = 0;
+
+    /* final 1 = encodedir: reserved directory components get the
+       extra handling in _encode's HGDI state */
+    return _encode(twobytes, onebyte, dest, destlen, destsize,
+                   src, len, 1);
+}
+
+static const Py_ssize_t maxstorepathlen = 120;
+
+/*
+ * Lowercase encoding: copy safe bytes unchanged, lowercase 'A'-'Z',
+ * and escape3() everything else.  Returns the number of bytes
+ * written (or needed, when dest is NULL and destsize is 0).
+ */
+static Py_ssize_t _lowerencode(char *dest, size_t destsize,
+                               const char *src, Py_ssize_t len)
+{
+    /* bytes copied through unchanged */
+    static const uint32_t onebyte[8] = {
+        1, 0x2bfffbfb, 0xe8000001, 0x2fffffff
+    };
+
+    /* 'A'-'Z': copied with 32 added, i.e. ASCII-lowercased */
+    static const uint32_t lower[8] = { 0, 0, 0x7fffffe };
+
+    Py_ssize_t i, destlen = 0;
+
+    for (i = 0; i < len; i++) {
+        if (inset(onebyte, src[i]))
+            charcopy(dest, &destlen, destsize, src[i]);
+        else if (inset(lower, src[i]))
+            charcopy(dest, &destlen, destsize, src[i] + 32);
+        else
+            escape3(dest, &destlen, destsize, src[i]);
+    }
+
+    return destlen;
+}
+
+/*
+ * Python entry point: lowerencode(path) -> bytes.
+ *
+ * Runs _lowerencode twice: a first pass with a NULL buffer to size
+ * the output, then a second pass into the freshly allocated bytes.
+ */
+PyObject *lowerencode(PyObject *self, PyObject *args)
+{
+    char *path;
+    Py_ssize_t len, newlen;
+    PyObject *ret;
+
+    /* NOTE(review): "s#" filling a Py_ssize_t assumes PY_SSIZE_T_CLEAN
+       is defined before Python.h (presumably in util.h) -- confirm. */
+    if (!PyArg_ParseTuple(args, "s#:lowerencode", &path, &len))
+        return NULL;
+
+    newlen = _lowerencode(NULL, 0, path, len);
+    ret = PyBytes_FromStringAndSize(NULL, newlen);
+    if (ret)
+        _lowerencode(PyBytes_AS_STRING(ret), newlen, path, len);
+
+    return ret;
+}
+
+/* See store.py:_auxencode for a description. */
+static Py_ssize_t auxencode(char *dest, size_t destsize,
+                            const char *src, Py_ssize_t len)
+{
+    /* empty set: no underscore (two-byte) rewriting at this stage */
+    static const uint32_t twobytes[8];
+
+    /* every byte passes through except ' ', '.' and '/', which the
+       _encode state machine inspects for reserved-name handling */
+    static const uint32_t onebyte[8] = {
+        ~0U, 0xffff3ffe, ~0U, ~0U, ~0U, ~0U, ~0U, ~0U,
+    };
+
+    /* encodedir=0: directory suffix handling already done earlier */
+    return _encode(twobytes, onebyte, dest, 0, destsize, src, len, 0);
+}
+
+/*
+ * Build the hashed short form of an over-long path:
+ *   "dh/" + truncated directory prefixes + truncated basename
+ *         + 40 hex chars of the SHA-1 + the original suffix.
+ * Returns a new bytes object, or NULL on allocation failure.
+ */
+static PyObject *hashmangle(const char *src, Py_ssize_t len, const char sha[20])
+{
+    static const Py_ssize_t dirprefixlen = 8;
+    static const Py_ssize_t maxshortdirslen = 68;
+    char *dest;
+    PyObject *ret;
+
+    Py_ssize_t i, d, p, lastslash = len - 1, lastdot = -1;
+    Py_ssize_t destsize, destlen = 0, slop, used;
+
+    /* Locate the final slash and the last dot after it (the suffix). */
+    while (lastslash >= 0 && src[lastslash] != '/') {
+        if (src[lastslash] == '.' && lastdot == -1)
+            lastdot = lastslash;
+        lastslash--;
+    }
+
+#if 0
+    /* All paths should end in a suffix of ".i" or ".d".
+       Unfortunately, the file names in test-hybridencode.py
+       violate this rule. */
+    if (lastdot != len - 3) {
+        PyErr_SetString(PyExc_ValueError,
+                        "suffix missing or wrong length");
+        return NULL;
+    }
+#endif
+
+    /* If src contains a suffix, we will append it to the end of
+       the new string, so make room. */
+    destsize = 120;
+    if (lastdot >= 0)
+        destsize += len - lastdot - 1;
+
+    ret = PyBytes_FromStringAndSize(NULL, destsize);
+    if (ret == NULL)
+        return NULL;
+
+    dest = PyBytes_AS_STRING(ret);
+    memcopy(dest, &destlen, destsize, "dh/", 3);
+
+    /* Copy up to dirprefixlen bytes of each path component, up to
+       a limit of maxshortdirslen bytes. */
+    for (i = d = p = 0; i < lastslash; i++, p++) {
+        if (src[i] == '/') {
+            char d = dest[destlen - 1];
+            /* After truncation, a directory name may end
+               in a space or dot, which are unportable. */
+            if (d == '.' || d == ' ')
+                dest[destlen - 1] = '_';
+            /* The + 3 is to account for "dh/" in the beginning */
+            if (destlen > maxshortdirslen + 3)
+                break;
+            charcopy(dest, &destlen, destsize, src[i]);
+            p = -1;  /* incremented back to 0 by the loop header */
+        }
+        else if (p < dirprefixlen)
+            charcopy(dest, &destlen, destsize, src[i]);
+    }
+
+    /* Rewind to just before the last slash copied. */
+    if (destlen > maxshortdirslen + 3)
+        do {
+            destlen--;
+        } while (destlen > 0 && dest[destlen] != '/');
+
+    if (destlen > 3) {
+        if (lastslash > 0) {
+            char d = dest[destlen - 1];
+            /* The last directory component may be
+               truncated, so make it safe. */
+            if (d == '.' || d == ' ')
+                dest[destlen - 1] = '_';
+        }
+
+        charcopy(dest, &destlen, destsize, '/');
+    }
+
+    /* Add a prefix of the original file's name.  Its length
+       depends on the number of bytes left after accounting for
+       hash and suffix. */
+    used = destlen + 40;
+    if (lastdot >= 0)
+        used += len - lastdot - 1;
+    slop = maxstorepathlen - used;
+    if (slop > 0) {
+        Py_ssize_t basenamelen =
+            lastslash >= 0 ? len - lastslash - 2 : len - 1;
+
+        if (basenamelen > slop)
+            basenamelen = slop;
+        if (basenamelen > 0)
+            memcopy(dest, &destlen, destsize, &src[lastslash + 1],
+                    basenamelen);
+    }
+
+    /* Add hash and suffix. */
+    for (i = 0; i < 20; i++)
+        hexencode(dest, &destlen, destsize, sha[i]);
+
+    if (lastdot >= 0)
+        memcopy(dest, &destlen, destsize, &src[lastdot],
+                len - lastdot - 1);
+
+    /* Shrink the bytes object in place to the bytes actually used.
+       NOTE(review): assigning through Py_SIZE() is not portable to
+       CPython versions where Py_SIZE is not an lvalue -- confirm the
+       supported interpreter range. */
+    assert(PyBytes_Check(ret));
+    Py_SIZE(ret) = destlen;
+
+    return ret;
+}
+
+/*
+ * Avoiding a trip through Python would improve performance by 50%,
+ * but we don't encounter enough long names to be worth the code.
+ *
+ * Compute the SHA-1 digest of str into hash.  Returns 0 on success,
+ * -1 with a Python exception set on failure.  The hashlib.sha1
+ * callable is looked up once and cached in a function static.
+ */
+static int sha1hash(char hash[20], const char *str, Py_ssize_t len)
+{
+    static PyObject *shafunc;   /* cached hashlib.sha1, never released */
+    PyObject *shaobj, *hashobj;
+
+    if (shafunc == NULL) {
+        /* NOTE(review): the module name is built as bytes; on Python 3
+           PyImport_Import is documented to take a (unicode) str name --
+           confirm this path works under the Py3 build. */
+        PyObject *hashlib, *name = PyBytes_FromString("hashlib");
+
+        if (name == NULL)
+            return -1;
+
+        hashlib = PyImport_Import(name);
+        Py_DECREF(name);
+
+        if (hashlib == NULL) {
+            PyErr_SetString(PyExc_ImportError, "hashlib");
+            return -1;
+        }
+        shafunc = PyObject_GetAttrString(hashlib, "sha1");
+        Py_DECREF(hashlib);
+
+        if (shafunc == NULL) {
+            PyErr_SetString(PyExc_AttributeError,
+                            "module 'hashlib' has no "
+                            "attribute 'sha1'");
+            return -1;
+        }
+    }
+
+    shaobj = PyObject_CallFunction(shafunc, "s#", str, len);
+
+    if (shaobj == NULL)
+        return -1;
+
+    hashobj = PyObject_CallMethod(shaobj, "digest", "");
+    Py_DECREF(shaobj);
+    if (hashobj == NULL)
+        return -1;
+
+    /* Defend against a misbehaving hash implementation. */
+    if (!PyBytes_Check(hashobj) || PyBytes_GET_SIZE(hashobj) != 20) {
+        PyErr_SetString(PyExc_TypeError,
+                        "result of digest is not a 20-byte hash");
+        Py_DECREF(hashobj);
+        return -1;
+    }
+
+    memcpy(hash, PyBytes_AS_STRING(hashobj), 20);
+    Py_DECREF(hashobj);
+    return 0;
+}
+
+#define MAXENCODE 4096 * 4
+
+/*
+ * Full hashed encoding for over-long paths: dir-encode, hash the
+ * result, lower-encode and aux-encode it, then mangle everything
+ * into the "dh/..." form.  Returns a new bytes object or NULL.
+ */
+static PyObject *hashencode(const char *src, Py_ssize_t len)
+{
+    char dired[MAXENCODE];
+    char lowered[MAXENCODE];
+    char auxed[MAXENCODE];
+    Py_ssize_t dirlen, lowerlen, auxlen, baselen;
+    char sha[20];
+
+    /* worst case is 3 output bytes per input byte; the - 5
+       presumably discounts a fixed "data/" prefix -- see callers */
+    baselen = (len - 5) * 3;
+    if (baselen >= MAXENCODE) {
+        PyErr_SetString(PyExc_ValueError, "string too long");
+        return NULL;
+    }
+
+    dirlen = _encodedir(dired, baselen, src, len);
+    /* dirlen - 1: the trailing NUL is not part of the hashed name */
+    if (sha1hash(sha, dired, dirlen - 1) == -1)
+        return NULL;
+    /* + 5 / - 5: skip the leading prefix when lower-encoding */
+    lowerlen = _lowerencode(lowered, baselen, dired + 5, dirlen - 5);
+    auxlen = auxencode(auxed, baselen, lowered, lowerlen);
+    return hashmangle(auxed, auxlen, sha);
+}
+
+/*
+ * Python entry point: pathencode(path) -> bytes.
+ *
+ * Short paths get the basic encoding (returning the original object
+ * unchanged when encoding is a no-op); paths longer than
+ * maxstorepathlen fall back to the hashed "dh/" encoding.
+ * All lengths below include the trailing NUL (hence the + 1s).
+ */
+PyObject *pathencode(PyObject *self, PyObject *args)
+{
+    Py_ssize_t len, newlen;
+    PyObject *pathobj, *newobj;
+    char *path;
+
+    if (!PyArg_ParseTuple(args, "O:pathencode", &pathobj))
+        return NULL;
+
+    if (PyBytes_AsStringAndSize(pathobj, &path, &len) == -1) {
+        PyErr_SetString(PyExc_TypeError, "expected a string");
+        return NULL;
+    }
+
+    if (len > maxstorepathlen)
+        /* force the hashencode branch below without sizing a pass */
+        newlen = maxstorepathlen + 2;
+    else
+        newlen = len ? basicencode(NULL, 0, path, len + 1) : 1;
+
+    if (newlen <= maxstorepathlen + 1) {
+        /* encoding changed nothing: hand back the original object */
+        if (newlen == len + 1) {
+            Py_INCREF(pathobj);
+            return pathobj;
+        }
+
+        newobj = PyBytes_FromStringAndSize(NULL, newlen);
+
+        if (newobj) {
+            /* drop the trailing NUL from the Python-visible length;
+               see the Py_SIZE portability note on hashmangle */
+            assert(PyBytes_Check(newobj));
+            Py_SIZE(newobj)--;
+            basicencode(PyBytes_AS_STRING(newobj), newlen, path,
+                        len + 1);
+        }
+    }
+    else
+        newobj = hashencode(path, len + 1);
+
+    return newobj;
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/revlog.c Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,1942 @@
+/*
+ parsers.c - efficient content parsing
+
+ Copyright 2008 Matt Mackall <mpm@selenic.com> and others
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+*/
+
+#include <Python.h>
+#include <ctype.h>
+#include <stddef.h>
+#include <string.h>
+
+#include "util.h"
+#include "bitmanipulation.h"
+
+#ifdef IS_PY3K
+/* The mapping of Python types is meant to be temporary to get Python
+ * 3 to compile. We should remove this once Python 3 support is fully
+ * supported and proper types are used in the extensions themselves. */
+#define PyInt_Check PyLong_Check
+#define PyInt_FromLong PyLong_FromLong
+#define PyInt_FromSsize_t PyLong_FromSsize_t
+#define PyInt_AS_LONG PyLong_AS_LONG
+#define PyInt_AsLong PyLong_AsLong
+#endif
+
+/*
+ * A base-16 trie for fast node->rev mapping.
+ *
+ * Each node has one slot per nybble value:
+ *   Positive value is index of the next node in the trie
+ *   Negative value is a leaf: -(rev + 1)
+ *   Zero is empty
+ */
+typedef struct {
+    int children[16];
+} nodetree;
+
+/*
+ * This class has two behaviors.
+ *
+ * When used in a list-like way (with integer keys), we decode an
+ * entry in a RevlogNG index file on demand.  Our last entry is a
+ * sentinel, always a nullid.  We have limited support for
+ * integer-keyed insert and delete, only at elements right before the
+ * sentinel.
+ *
+ * With string keys, we lazily perform a reverse mapping from node to
+ * rev, using a base-16 trie.
+ */
+typedef struct {
+    PyObject_HEAD
+    /* Type-specific fields go here. */
+    PyObject *data;        /* raw bytes of index */
+    Py_buffer buf;         /* buffer of data */
+    PyObject **cache;      /* cached tuples, lazily allocated (owned refs) */
+    const char **offsets;  /* populated on demand (inlined data only) */
+    Py_ssize_t raw_length; /* original number of elements */
+    Py_ssize_t length;     /* current number of elements */
+    PyObject *added;       /* populated on demand: list of appended tuples */
+    PyObject *headrevs;    /* cache, invalidated on changes */
+    PyObject *filteredrevs;/* filtered revs set (strong ref) */
+    nodetree *nt;          /* base-16 trie */
+    unsigned ntlength;     /* # nodes in use */
+    unsigned ntcapacity;   /* # nodes allocated */
+    int ntdepth;           /* maximum depth of tree */
+    int ntsplits;          /* # splits performed */
+    int ntrev;             /* last rev scanned */
+    int ntlookups;         /* # lookups */
+    int ntmisses;          /* # lookups that miss the cache */
+    int inlined;           /* non-zero if data is interleaved with the index */
+} indexObject;
+
+/*
+ * Total number of entries, counting both the on-disk portion and any
+ * entries appended in memory (the nullid sentinel is included).
+ */
+static Py_ssize_t index_length(const indexObject *self)
+{
+    if (self->added == NULL)
+        return self->length;
+    return self->length + PyList_GET_SIZE(self->added);
+}
+
+static PyObject *nullentry;
+static const char nullid[20];
+
+static Py_ssize_t inline_scan(indexObject *self, const char **offsets);
+
+#if LONG_MAX == 0x7fffffffL
+static char *tuple_format = "Kiiiiiis#";
+#else
+static char *tuple_format = "kiiiiiis#";
+#endif
+
+/* A RevlogNG v1 index entry is 64 bytes long. */
+static const long v1_hdrsize = 64;
+
+/*
+ * Return a pointer to the beginning of a RevlogNG record, or NULL
+ * (with MemoryError set) if the lazy offset table cannot be built.
+ */
+static const char *index_deref(indexObject *self, Py_ssize_t pos)
+{
+    if (self->inlined && pos > 0) {
+        /* inlined indexes have variable-length records, so build a
+           per-record offset table on first use */
+        if (self->offsets == NULL) {
+            self->offsets = PyMem_Malloc(self->raw_length *
+                                         sizeof(*self->offsets));
+            if (self->offsets == NULL)
+                return (const char *)PyErr_NoMemory();
+            /* NOTE(review): inline_scan's return value is ignored
+               here -- confirm it cannot fail at this point. */
+            inline_scan(self, self->offsets);
+        }
+        return self->offsets[pos];
+    }
+
+    /* non-inlined (or pos 0): records are fixed-size */
+    return (const char *)(self->buf.buf) + pos * v1_hdrsize;
+}
+
+/*
+ * Fill ps[0..1] with the parent revisions of rev.  Returns 0 on
+ * success, -1 with ValueError set if a parent exceeds maxrev.
+ *
+ * NOTE(review): the first branch dereferences self->added without a
+ * NULL check -- callers are presumably required to pass a rev below
+ * index_length(); confirm all call sites honor that.
+ */
+static inline int index_get_parents(indexObject *self, Py_ssize_t rev,
+                                    int *ps, int maxrev)
+{
+    if (rev >= self->length - 1) {
+        /* in-memory entry: parents are tuple items 5 and 6 */
+        PyObject *tuple = PyList_GET_ITEM(self->added,
+                                          rev - self->length + 1);
+        ps[0] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 5));
+        ps[1] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 6));
+    } else {
+        const char *data = index_deref(self, rev);
+        ps[0] = getbe32(data + 24);
+        ps[1] = getbe32(data + 28);
+    }
+    /* If index file is corrupted, ps[] may point to invalid revisions. So
+     * there is a risk of buffer overflow to trust them unconditionally. */
+    if (ps[0] > maxrev || ps[1] > maxrev) {
+        PyErr_SetString(PyExc_ValueError, "parent out of range");
+        return -1;
+    }
+    return 0;
+}
+
+
+/*
+ * RevlogNG format (all in big endian, data may be inlined):
+ *    6 bytes: offset
+ *    2 bytes: flags
+ *    4 bytes: compressed length
+ *    4 bytes: uncompressed length
+ *    4 bytes: base revision
+ *    4 bytes: link revision
+ *    4 bytes: parent 1 revision
+ *    4 bytes: parent 2 revision
+ *   32 bytes: nodeid (only 20 bytes used)
+ *
+ * Decode entry pos into an 8-tuple, caching decoded tuples so each
+ * on-disk record is parsed at most once.  Returns a new reference,
+ * or NULL with IndexError/MemoryError set.
+ */
+static PyObject *index_get(indexObject *self, Py_ssize_t pos)
+{
+    uint64_t offset_flags;
+    int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2;
+    const char *c_node_id;
+    const char *data;
+    Py_ssize_t length = index_length(self);
+    PyObject *entry;
+
+    /* support Python-style negative indexing */
+    if (pos < 0)
+        pos += length;
+
+    if (pos < 0 || pos >= length) {
+        PyErr_SetString(PyExc_IndexError, "revlog index out of range");
+        return NULL;
+    }
+
+    /* the last entry is always the nullid sentinel */
+    if (pos == length - 1) {
+        Py_INCREF(nullentry);
+        return nullentry;
+    }
+
+    /* entries appended in memory are stored ready-made in self->added */
+    if (pos >= self->length - 1) {
+        PyObject *obj;
+        obj = PyList_GET_ITEM(self->added, pos - self->length + 1);
+        Py_INCREF(obj);
+        return obj;
+    }
+
+    if (self->cache) {
+        if (self->cache[pos]) {
+            Py_INCREF(self->cache[pos]);
+            return self->cache[pos];
+        }
+    } else {
+        self->cache = calloc(self->raw_length, sizeof(PyObject *));
+        if (self->cache == NULL)
+            return PyErr_NoMemory();
+    }
+
+    data = index_deref(self, pos);
+    if (data == NULL)
+        return NULL;
+
+    offset_flags = getbe32(data + 4);
+    if (pos == 0) /* mask out version number for the first entry */
+        offset_flags &= 0xFFFF;
+    else {
+        uint32_t offset_high = getbe32(data);
+        offset_flags |= ((uint64_t)offset_high) << 32;
+    }
+
+    comp_len = getbe32(data + 8);
+    uncomp_len = getbe32(data + 12);
+    base_rev = getbe32(data + 16);
+    link_rev = getbe32(data + 20);
+    parent_1 = getbe32(data + 24);
+    parent_2 = getbe32(data + 28);
+    c_node_id = data + 32;
+
+    entry = Py_BuildValue(tuple_format, offset_flags, comp_len,
+                          uncomp_len, base_rev, link_rev,
+                          parent_1, parent_2, c_node_id, 20);
+
+    if (entry) {
+        /* untrack from GC: the tuple holds no container refs, and the
+           extra INCREF below is the cache's owned reference */
+        PyObject_GC_UnTrack(entry);
+        Py_INCREF(entry);
+    }
+
+    self->cache[pos] = entry;
+
+    return entry;
+}
+
+/*
+ * Return the 20-byte SHA of the node corresponding to the given rev.
+ * Returns nullid for the sentinel/INT_MAX, NULL if rev is out of
+ * range or the entry cannot be dereferenced.
+ */
+static const char *index_node(indexObject *self, Py_ssize_t pos)
+{
+    Py_ssize_t length = index_length(self);
+    const char *data;
+
+    /* INT_MAX is the rev the nullid is inserted under in nt_init */
+    if (pos == length - 1 || pos == INT_MAX)
+        return nullid;
+
+    if (pos >= length)
+        return NULL;
+
+    /* in-memory entries keep the node as tuple item 7 */
+    if (pos >= self->length - 1) {
+        PyObject *tuple, *str;
+        tuple = PyList_GET_ITEM(self->added, pos - self->length + 1);
+        str = PyTuple_GetItem(tuple, 7);
+        return str ? PyBytes_AS_STRING(str) : NULL;
+    }
+
+    data = index_deref(self, pos);
+    return data ? data + 32 : NULL;
+}
+
+static int nt_insert(indexObject *self, const char *node, int rev);
+
+/*
+ * Extract a 20-byte binary node from a bytes object.  Returns 0 and
+ * fills *node/*nodelen on success; -1 with an exception set otherwise.
+ */
+static int node_check(PyObject *obj, char **node, Py_ssize_t *nodelen)
+{
+    if (PyBytes_AsStringAndSize(obj, node, nodelen) == -1)
+        return -1;
+    if (*nodelen == 20)
+        return 0;
+    PyErr_SetString(PyExc_ValueError, "20-byte hash required");
+    return -1;
+}
+
+/*
+ * index.insert(index, entry): append an 8-tuple entry.  Only
+ * insertion at index -1 (right before the nullid sentinel) is
+ * supported; anything else raises IndexError.
+ */
+static PyObject *index_insert(indexObject *self, PyObject *args)
+{
+    PyObject *obj;
+    char *node;
+    int index;
+    Py_ssize_t len, nodelen;
+
+    if (!PyArg_ParseTuple(args, "iO", &index, &obj))
+        return NULL;
+
+    if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 8) {
+        PyErr_SetString(PyExc_TypeError, "8-tuple required");
+        return NULL;
+    }
+
+    /* item 7 must be a 20-byte node */
+    if (node_check(PyTuple_GET_ITEM(obj, 7), &node, &nodelen) == -1)
+        return NULL;
+
+    len = index_length(self);
+
+    if (index < 0)
+        index += len;
+
+    if (index != len - 1) {
+        PyErr_SetString(PyExc_IndexError,
+                        "insert only supported at index -1");
+        return NULL;
+    }
+
+    if (self->added == NULL) {
+        self->added = PyList_New(0);
+        if (self->added == NULL)
+            return NULL;
+    }
+
+    if (PyList_Append(self->added, obj) == -1)
+        return NULL;
+
+    /* keep the node trie in sync if it has been built */
+    if (self->nt)
+        nt_insert(self, node, index);
+
+    /* head set may have changed */
+    Py_CLEAR(self->headrevs);
+    Py_RETURN_NONE;
+}
+
+/*
+ * Free all derived data: the decoded-tuple cache, the inline offset
+ * table, the node trie and the cached head list.
+ */
+static void _index_clearcaches(indexObject *self)
+{
+    if (self->cache) {
+        Py_ssize_t i;
+
+        for (i = 0; i < self->raw_length; i++)
+            Py_CLEAR(self->cache[i]);
+        free(self->cache);
+        self->cache = NULL;
+    }
+    if (self->offsets) {
+        PyMem_Free(self->offsets);
+        self->offsets = NULL;
+    }
+    if (self->nt) {
+        free(self->nt);
+        self->nt = NULL;
+    }
+    Py_CLEAR(self->headrevs);
+}
+
+/*
+ * Python-visible cache reset: frees the caches and zeroes the node
+ * trie bookkeeping counters.
+ */
+static PyObject *index_clearcaches(indexObject *self)
+{
+    _index_clearcaches(self);
+    self->ntlength = self->ntcapacity = 0;
+    self->ntdepth = self->ntsplits = 0;
+    self->ntrev = -1;
+    self->ntlookups = self->ntmisses = 0;
+    Py_RETURN_NONE;
+}
+
+/*
+ * Return a dict of diagnostic counters (trie sizes, lookup stats,
+ * revision counts).  NULL with an exception set on failure.
+ */
+static PyObject *index_stats(indexObject *self)
+{
+    PyObject *obj = PyDict_New();
+    PyObject *t = NULL;
+
+    if (obj == NULL)
+        return NULL;
+
+/* store one self->field under key __d; on success the temporary t is
+   released here, on failure it is released at bail */
+#define istat(__n, __d) \
+    do { \
+        t = PyInt_FromSsize_t(self->__n); \
+        if (!t) \
+            goto bail; \
+        if (PyDict_SetItemString(obj, __d, t) == -1) \
+            goto bail; \
+        Py_DECREF(t); \
+    } while (0)
+
+    if (self->added) {
+        Py_ssize_t len = PyList_GET_SIZE(self->added);
+        t = PyInt_FromSsize_t(len);
+        if (!t)
+            goto bail;
+        if (PyDict_SetItemString(obj, "index entries added", t) == -1)
+            goto bail;
+        Py_DECREF(t);
+    }
+
+    /* only report the on-disk size when it differs from the logical one */
+    if (self->raw_length != self->length - 1)
+        istat(raw_length, "revs on disk");
+    istat(length, "revs in memory");
+    istat(ntcapacity, "node trie capacity");
+    istat(ntdepth, "node trie depth");
+    istat(ntlength, "node trie count");
+    istat(ntlookups, "node trie lookups");
+    istat(ntmisses, "node trie misses");
+    istat(ntrev, "node trie last rev scanned");
+    istat(ntsplits, "node trie splits");
+
+#undef istat
+
+    return obj;
+
+bail:
+    Py_XDECREF(obj);
+    Py_XDECREF(t);
+    return NULL;
+}
+
+/*
+ * When we cache a list, we want to be sure the caller can't mutate
+ * the cached copy.  Returns a new shallow copy of list, or NULL.
+ */
+static PyObject *list_copy(PyObject *list)
+{
+    Py_ssize_t len = PyList_GET_SIZE(list);
+    PyObject *newlist = PyList_New(len);
+    Py_ssize_t i;
+
+    if (newlist == NULL)
+        return NULL;
+
+    for (i = 0; i < len; i++) {
+        PyObject *obj = PyList_GET_ITEM(list, i);
+        Py_INCREF(obj);
+        PyList_SET_ITEM(newlist, i, obj);
+    }
+
+    return newlist;
+}
+
+/*
+ * Call filter(arg) and return its truth value: 1 if filtered, 0 if
+ * not (or if filter is NULL), -1 on error with an exception set.
+ */
+static int check_filter(PyObject *filter, Py_ssize_t arg) {
+    if (filter) {
+        PyObject *arglist, *result;
+        int isfiltered;
+
+        arglist = Py_BuildValue("(n)", arg);
+        if (!arglist) {
+            return -1;
+        }
+
+        result = PyEval_CallObject(filter, arglist);
+        Py_DECREF(arglist);
+        if (!result) {
+            return -1;
+        }
+
+        /* PyObject_IsTrue returns 1 if true, 0 if false, -1 if error,
+         * same as this function, so we can just return it directly.*/
+        isfiltered = PyObject_IsTrue(result);
+        Py_DECREF(result);
+        return isfiltered;
+    } else {
+        return 0;
+    }
+}
+
+/*
+ * Mark every rev in list with marker in the phases array and return
+ * the smallest rev seen (or index_length + 1 if the list is empty).
+ * Returns -2 on iteration error.
+ *
+ * NOTE(review): list items are assumed to be ints within the phases
+ * array bounds; neither is validated here -- confirm callers
+ * guarantee this.
+ */
+static Py_ssize_t add_roots_get_min(indexObject *self, PyObject *list,
+                                    Py_ssize_t marker, char *phases)
+{
+    PyObject *iter = NULL;
+    PyObject *iter_item = NULL;
+    Py_ssize_t min_idx = index_length(self) + 1;
+    long iter_item_long;
+
+    if (PyList_GET_SIZE(list) != 0) {
+        iter = PyObject_GetIter(list);
+        if (iter == NULL)
+            return -2;
+        while ((iter_item = PyIter_Next(iter)))
+        {
+            iter_item_long = PyInt_AS_LONG(iter_item);
+            Py_DECREF(iter_item);
+            if (iter_item_long < min_idx)
+                min_idx = iter_item_long;
+            phases[iter_item_long] = marker;
+        }
+        Py_DECREF(iter);
+    }
+
+    return min_idx;
+}
+
+/*
+ * A rev's phase is at least as "high" (secret > draft > public) as
+ * either of its parents'; propagate the maximum into phases[i].
+ */
+static inline void set_phase_from_parents(char *phases, int parent_1,
+                                          int parent_2, Py_ssize_t i)
+{
+    if (parent_1 >= 0 && phases[parent_1] > phases[i])
+        phases[i] = phases[parent_1];
+    if (parent_2 >= 0 && phases[parent_2] > phases[i])
+        phases[i] = phases[parent_2];
+}
+
+/*
+ * reachableroots2(minroot, heads, roots, includepath) -> list of revs.
+ *
+ * Walk ancestors of heads (never descending below minroot) and
+ * collect the members of roots that are reached.  When includepath
+ * is True, also include every rev on a path between a reachable root
+ * and a head.  Revisions are offset by +1 in the state arrays so
+ * that nullrev (-1) has a slot.
+ */
+static PyObject *reachableroots2(indexObject *self, PyObject *args)
+{
+
+    /* Input */
+    long minroot;
+    PyObject *includepatharg = NULL;
+    int includepath = 0;
+    /* heads and roots are lists */
+    PyObject *heads = NULL;
+    PyObject *roots = NULL;
+    PyObject *reachable = NULL;
+
+    PyObject *val;
+    Py_ssize_t len = index_length(self) - 1;
+    long revnum;
+    Py_ssize_t k;
+    Py_ssize_t i;
+    Py_ssize_t l;
+    int r;
+    int parents[2];
+
+    /* Internal data structure:
+     * tovisit: array of length len+1 (all revs + nullrev), filled upto lentovisit
+     * revstates: array of length len+1 (all revs + nullrev) */
+    int *tovisit = NULL;
+    long lentovisit = 0;
+    enum { RS_SEEN = 1, RS_ROOT = 2, RS_REACHABLE = 4 };
+    char *revstates = NULL;
+
+    /* Get arguments */
+    if (!PyArg_ParseTuple(args, "lO!O!O!", &minroot, &PyList_Type, &heads,
+                          &PyList_Type, &roots,
+                          &PyBool_Type, &includepatharg))
+        goto bail;
+
+    if (includepatharg == Py_True)
+        includepath = 1;
+
+    /* Initialize return set */
+    reachable = PyList_New(0);
+    if (reachable == NULL)
+        goto bail;
+
+    /* Initialize internal datastructures */
+    tovisit = (int *)malloc((len + 1) * sizeof(int));
+    if (tovisit == NULL) {
+        PyErr_NoMemory();
+        goto bail;
+    }
+
+    revstates = (char *)calloc(len + 1, 1);
+    if (revstates == NULL) {
+        PyErr_NoMemory();
+        goto bail;
+    }
+
+    /* Flag every root in revstates */
+    l = PyList_GET_SIZE(roots);
+    for (i = 0; i < l; i++) {
+        revnum = PyInt_AsLong(PyList_GET_ITEM(roots, i));
+        if (revnum == -1 && PyErr_Occurred())
+            goto bail;
+        /* If root is out of range, e.g. wdir(), it must be unreachable
+         * from heads. So we can just ignore it. */
+        if (revnum + 1 < 0 || revnum + 1 >= len + 1)
+            continue;
+        revstates[revnum + 1] |= RS_ROOT;
+    }
+
+    /* Populate tovisit with all the heads */
+    l = PyList_GET_SIZE(heads);
+    for (i = 0; i < l; i++) {
+        revnum = PyInt_AsLong(PyList_GET_ITEM(heads, i));
+        if (revnum == -1 && PyErr_Occurred())
+            goto bail;
+        if (revnum + 1 < 0 || revnum + 1 >= len + 1) {
+            PyErr_SetString(PyExc_IndexError, "head out of range");
+            goto bail;
+        }
+        if (!(revstates[revnum + 1] & RS_SEEN)) {
+            tovisit[lentovisit++] = (int)revnum;
+            revstates[revnum + 1] |= RS_SEEN;
+        }
+    }
+
+    /* Visit the tovisit list and find the reachable roots */
+    k = 0;
+    while (k < lentovisit) {
+        /* Add the node to reachable if it is a root*/
+        revnum = tovisit[k++];
+        if (revstates[revnum + 1] & RS_ROOT) {
+            revstates[revnum + 1] |= RS_REACHABLE;
+            val = PyInt_FromLong(revnum);
+            if (val == NULL)
+                goto bail;
+            r = PyList_Append(reachable, val);
+            Py_DECREF(val);
+            if (r < 0)
+                goto bail;
+            /* without includepath we can stop at the first root hit */
+            if (includepath == 0)
+                continue;
+        }
+
+        /* Add its parents to the list of nodes to visit */
+        if (revnum == -1)
+            continue;
+        r = index_get_parents(self, revnum, parents, (int)len - 1);
+        if (r < 0)
+            goto bail;
+        for (i = 0; i < 2; i++) {
+            if (!(revstates[parents[i] + 1] & RS_SEEN)
+                && parents[i] >= minroot) {
+                tovisit[lentovisit++] = parents[i];
+                revstates[parents[i] + 1] |= RS_SEEN;
+            }
+        }
+    }
+
+    /* Find all the nodes in between the roots we found and the heads
+     * and add them to the reachable set */
+    if (includepath == 1) {
+        long minidx = minroot;
+        if (minidx < 0)
+            minidx = 0;
+        /* forward sweep: a seen rev with a reachable parent is itself
+           on a root-to-head path */
+        for (i = minidx; i < len; i++) {
+            if (!(revstates[i + 1] & RS_SEEN))
+                continue;
+            r = index_get_parents(self, i, parents, (int)len - 1);
+            /* Corrupted index file, error is set from
+             * index_get_parents */
+            if (r < 0)
+                goto bail;
+            if (((revstates[parents[0] + 1] |
+                  revstates[parents[1] + 1]) & RS_REACHABLE)
+                && !(revstates[i + 1] & RS_REACHABLE)) {
+                revstates[i + 1] |= RS_REACHABLE;
+                val = PyInt_FromLong(i);
+                if (val == NULL)
+                    goto bail;
+                r = PyList_Append(reachable, val);
+                Py_DECREF(val);
+                if (r < 0)
+                    goto bail;
+            }
+        }
+    }
+
+    free(revstates);
+    free(tovisit);
+    return reachable;
+bail:
+    Py_XDECREF(reachable);
+    free(revstates);
+    free(tovisit);
+    return NULL;
+}
+
+/*
+ * computephases(roots) -> (phaseslist, phasessetlist).
+ *
+ * roots is a list of per-phase root lists.  The result pairs a
+ * per-rev phase list with a list of per-phase rev sets (entry 0 is
+ * None: the public set is derived by difference on the Python side).
+ *
+ * NOTE(review): the "roots not a list" path returns NULL without
+ * setting an exception -- confirm callers never hit it, or that a
+ * SystemError there is acceptable.
+ */
+static PyObject *compute_phases_map_sets(indexObject *self, PyObject *args)
+{
+    PyObject *roots = Py_None;
+    PyObject *ret = NULL;
+    PyObject *phaseslist = NULL;
+    PyObject *phaseroots = NULL;
+    PyObject *phaseset = NULL;
+    PyObject *phasessetlist = NULL;
+    PyObject *rev = NULL;
+    Py_ssize_t len = index_length(self) - 1;
+    Py_ssize_t numphase = 0;
+    Py_ssize_t minrevallphases = 0;
+    Py_ssize_t minrevphase = 0;
+    Py_ssize_t i = 0;
+    char *phases = NULL;
+    long phase;
+
+    if (!PyArg_ParseTuple(args, "O", &roots))
+        goto done;
+    if (roots == NULL || !PyList_Check(roots))
+        goto done;
+
+    phases = calloc(len, 1); /* phase per rev: {0: public, 1: draft, 2: secret} */
+    if (phases == NULL) {
+        PyErr_NoMemory();
+        goto done;
+    }
+    /* Put the phase information of all the roots in phases */
+    numphase = PyList_GET_SIZE(roots)+1;
+    minrevallphases = len + 1;
+    phasessetlist = PyList_New(numphase);
+    if (phasessetlist == NULL)
+        goto done;
+
+    /* slot 0 (public) stays None */
+    PyList_SET_ITEM(phasessetlist, 0, Py_None);
+    Py_INCREF(Py_None);
+
+    for (i = 0; i < numphase-1; i++) {
+        phaseroots = PyList_GET_ITEM(roots, i);
+        phaseset = PySet_New(NULL);
+        if (phaseset == NULL)
+            goto release;
+        PyList_SET_ITEM(phasessetlist, i+1, phaseset);
+        if (!PyList_Check(phaseroots))
+            goto release;
+        minrevphase = add_roots_get_min(self, phaseroots, i+1, phases);
+        if (minrevphase == -2) /* Error from add_roots_get_min */
+            goto release;
+        minrevallphases = MIN(minrevallphases, minrevphase);
+    }
+    /* Propagate the phase information from the roots to the revs */
+    if (minrevallphases != -1) {
+        int parents[2];
+        for (i = minrevallphases; i < len; i++) {
+            if (index_get_parents(self, i, parents,
+                                  (int)len - 1) < 0)
+                goto release;
+            set_phase_from_parents(phases, parents[0], parents[1], i);
+        }
+    }
+    /* Transform phase list to a python list */
+    phaseslist = PyList_New(len);
+    if (phaseslist == NULL)
+        goto release;
+    for (i = 0; i < len; i++) {
+        PyObject *phaseval;
+
+        phase = phases[i];
+        /* We only store the sets of phase for non public phase, the public phase
+         * is computed as a difference */
+        if (phase != 0) {
+            phaseset = PyList_GET_ITEM(phasessetlist, phase);
+            rev = PyInt_FromLong(i);
+            if (rev == NULL)
+                goto release;
+            PySet_Add(phaseset, rev);
+            Py_XDECREF(rev);
+        }
+        phaseval = PyInt_FromLong(phase);
+        if (phaseval == NULL)
+            goto release;
+        PyList_SET_ITEM(phaseslist, i, phaseval);
+    }
+    /* PyTuple_Pack takes its own references, so ours are dropped below */
+    ret = PyTuple_Pack(2, phaseslist, phasessetlist);
+
+release:
+    Py_XDECREF(phaseslist);
+    Py_XDECREF(phasessetlist);
+done:
+    free(phases);
+    return ret;
+}
+
+/*
+ * headrevs([filteredrevs]) -> list of head revisions.
+ *
+ * A head is a rev with no unfiltered children.  The result is cached
+ * on self->headrevs (keyed by identity of filteredrevs) and a copy is
+ * returned so callers cannot mutate the cache.
+ */
+static PyObject *index_headrevs(indexObject *self, PyObject *args)
+{
+    Py_ssize_t i, j, len;
+    char *nothead = NULL;   /* per-rev flag: has an unfiltered child */
+    PyObject *heads = NULL;
+    PyObject *filter = NULL;
+    PyObject *filteredrevs = Py_None;
+
+    if (!PyArg_ParseTuple(args, "|O", &filteredrevs)) {
+        return NULL;
+    }
+
+    /* cache hit requires the identical filteredrevs object */
+    if (self->headrevs && filteredrevs == self->filteredrevs)
+        return list_copy(self->headrevs);
+
+    Py_DECREF(self->filteredrevs);
+    self->filteredrevs = filteredrevs;
+    Py_INCREF(filteredrevs);
+
+    if (filteredrevs != Py_None) {
+        filter = PyObject_GetAttrString(filteredrevs, "__contains__");
+        if (!filter) {
+            PyErr_SetString(PyExc_TypeError,
+                            "filteredrevs has no attribute __contains__");
+            goto bail;
+        }
+    }
+
+    len = index_length(self) - 1;
+    heads = PyList_New(0);
+    if (heads == NULL)
+        goto bail;
+    /* empty repo: nullrev is the only head */
+    if (len == 0) {
+        PyObject *nullid = PyInt_FromLong(-1);
+        if (nullid == NULL || PyList_Append(heads, nullid) == -1) {
+            Py_XDECREF(nullid);
+            goto bail;
+        }
+        goto done;
+    }
+
+    nothead = calloc(len, 1);
+    if (nothead == NULL) {
+        PyErr_NoMemory();
+        goto bail;
+    }
+
+    /* walk backwards so children are visited before their parents */
+    for (i = len - 1; i >= 0; i--) {
+        int isfiltered;
+        int parents[2];
+
+        /* If nothead[i] == 1, it means we've seen an unfiltered child of this
+         * node already, and therefore this node is not filtered. So we can skip
+         * the expensive check_filter step.
+         */
+        if (nothead[i] != 1) {
+            isfiltered = check_filter(filter, i);
+            if (isfiltered == -1) {
+                PyErr_SetString(PyExc_TypeError,
+                                "unable to check filter");
+                goto bail;
+            }
+
+            if (isfiltered) {
+                nothead[i] = 1;
+                continue;
+            }
+        }
+
+        if (index_get_parents(self, i, parents, (int)len - 1) < 0)
+            goto bail;
+        for (j = 0; j < 2; j++) {
+            if (parents[j] >= 0)
+                nothead[parents[j]] = 1;
+        }
+    }
+
+    /* everything not marked as a non-head is a head */
+    for (i = 0; i < len; i++) {
+        PyObject *head;
+
+        if (nothead[i])
+            continue;
+        head = PyInt_FromSsize_t(i);
+        if (head == NULL || PyList_Append(heads, head) == -1) {
+            Py_XDECREF(head);
+            goto bail;
+        }
+    }
+
+done:
+    self->headrevs = heads;
+    Py_XDECREF(filter);
+    free(nothead);
+    return list_copy(self->headrevs);
+bail:
+    Py_XDECREF(filter);
+    Py_XDECREF(heads);
+    free(nothead);
+    return NULL;
+}
+
+/*
+ * Extract nybble number level from a binary node: even levels take
+ * the high nybble of the byte, odd levels the low nybble.
+ */
+static inline int nt_level(const char *node, Py_ssize_t level)
+{
+    int v = node[level>>1];
+    if (!(level & 1))
+        v >>= 4;
+    return v & 0xf;
+}
+
+/*
+ * Look up a (possibly partial) node in the trie.
+ *
+ * Return values:
+ *
+ *   -4: match is ambiguous (multiple candidates)
+ *   -2: not found
+ * rest: valid rev
+ *
+ * hex selects whether node is hex digits (up to 40) or binary bytes
+ * (up to 20); nodelen may be shorter for prefix lookups.
+ */
+static int nt_find(indexObject *self, const char *node, Py_ssize_t nodelen,
+                   int hex)
+{
+    int (*getnybble)(const char *, Py_ssize_t) = hex ? hexdigit : nt_level;
+    int level, maxlevel, off;
+
+    /* nullid maps to nullrev; the node[0] test is a cheap pre-filter */
+    if (nodelen == 20 && node[0] == '\0' && memcmp(node, nullid, 20) == 0)
+        return -1;
+
+    if (self->nt == NULL)
+        return -2;
+
+    if (hex)
+        maxlevel = nodelen > 40 ? 40 : (int)nodelen;
+    else
+        maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2);
+
+    for (level = off = 0; level < maxlevel; level++) {
+        int k = getnybble(node, level);
+        nodetree *n = &self->nt[off];
+        int v = n->children[k];
+
+        if (v < 0) {
+            /* leaf: verify the remaining nybbles match the stored node */
+            const char *n;
+            Py_ssize_t i;
+
+            v = -(v + 1);
+            n = index_node(self, v);
+            if (n == NULL)
+                return -2;
+            for (i = level; i < maxlevel; i++)
+                if (getnybble(node, i) != nt_level(n, i))
+                    return -2;
+            return v;
+        }
+        if (v == 0)
+            return -2;
+        off = v;
+    }
+    /* multiple matches against an ambiguous prefix */
+    return -4;
+}
+
+/*
+ * Allocate a fresh (zeroed) trie node, doubling the backing array as
+ * needed.  Returns its index, or -1 with MemoryError set.
+ *
+ * NOTE(review): on realloc failure the old block is leaked and
+ * self->nt left NULL -- the process is out of memory anyway, but
+ * confirm no caller touches self->nt afterwards.
+ */
+static int nt_new(indexObject *self)
+{
+    if (self->ntlength == self->ntcapacity) {
+        if (self->ntcapacity >= INT_MAX / (sizeof(nodetree) * 2)) {
+            PyErr_SetString(PyExc_MemoryError,
+                            "overflow in nt_new");
+            return -1;
+        }
+        self->ntcapacity *= 2;
+        self->nt = realloc(self->nt,
+                           self->ntcapacity * sizeof(nodetree));
+        if (self->nt == NULL) {
+            PyErr_SetString(PyExc_MemoryError, "out of memory");
+            return -1;
+        }
+        memset(&self->nt[self->ntlength], 0,
+               sizeof(nodetree) * (self->ntcapacity - self->ntlength));
+    }
+    return self->ntlength++;
+}
+
+static int nt_insert(indexObject *self, const char *node, int rev)
+{
+ int level = 0;
+ int off = 0;
+
+ while (level < 40) {
+ int k = nt_level(node, level);
+ nodetree *n;
+ int v;
+
+ n = &self->nt[off];
+ v = n->children[k];
+
+ if (v == 0) {
+ n->children[k] = -rev - 1;
+ return 0;
+ }
+ if (v < 0) {
+ const char *oldnode = index_node(self, -(v + 1));
+ int noff;
+
+ if (!oldnode || !memcmp(oldnode, node, 20)) {
+ n->children[k] = -rev - 1;
+ return 0;
+ }
+ noff = nt_new(self);
+ if (noff == -1)
+ return -1;
+ /* self->nt may have been changed by realloc */
+ self->nt[off].children[k] = noff;
+ off = noff;
+ n = &self->nt[off];
+ n->children[nt_level(oldnode, ++level)] = v;
+ if (level > self->ntdepth)
+ self->ntdepth = level;
+ self->ntsplits += 1;
+ } else {
+ level += 1;
+ off = v;
+ }
+ }
+
+ return -1;
+}
+
+static int nt_init(indexObject *self)
+{
+ if (self->nt == NULL) {
+ if ((size_t)self->raw_length > INT_MAX / sizeof(nodetree)) {
+ PyErr_SetString(PyExc_ValueError, "overflow in nt_init");
+ return -1;
+ }
+ self->ntcapacity = self->raw_length < 4
+ ? 4 : (int)self->raw_length / 2;
+
+ self->nt = calloc(self->ntcapacity, sizeof(nodetree));
+ if (self->nt == NULL) {
+ PyErr_NoMemory();
+ return -1;
+ }
+ self->ntlength = 1;
+ self->ntrev = (int)index_length(self) - 1;
+ self->ntlookups = 1;
+ self->ntmisses = 0;
+ if (nt_insert(self, nullid, INT_MAX) == -1)
+ return -1;
+ }
+ return 0;
+}
+
+/*
+ * Return values:
+ *
+ * -3: error (exception set)
+ * -2: not found (no exception set)
+ * rest: valid rev
+ */
+static int index_find_node(indexObject *self,
+ const char *node, Py_ssize_t nodelen)
+{
+ int rev;
+
+ self->ntlookups++;
+ rev = nt_find(self, node, nodelen, 0);
+ if (rev >= -1)
+ return rev;
+
+ if (nt_init(self) == -1)
+ return -3;
+
+ /*
+ * For the first handful of lookups, we scan the entire index,
+ * and cache only the matching nodes. This optimizes for cases
+ * like "hg tip", where only a few nodes are accessed.
+ *
+ * After that, we cache every node we visit, using a single
+ * scan amortized over multiple lookups. This gives the best
+ * bulk performance, e.g. for "hg log".
+ */
+ if (self->ntmisses++ < 4) {
+ for (rev = self->ntrev - 1; rev >= 0; rev--) {
+ const char *n = index_node(self, rev);
+ if (n == NULL)
+ return -2;
+ if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
+ if (nt_insert(self, n, rev) == -1)
+ return -3;
+ break;
+ }
+ }
+ } else {
+ for (rev = self->ntrev - 1; rev >= 0; rev--) {
+ const char *n = index_node(self, rev);
+ if (n == NULL) {
+ self->ntrev = rev + 1;
+ return -2;
+ }
+ if (nt_insert(self, n, rev) == -1) {
+ self->ntrev = rev + 1;
+ return -3;
+ }
+ if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
+ break;
+ }
+ }
+ self->ntrev = rev;
+ }
+
+ if (rev >= 0)
+ return rev;
+ return -2;
+}
+
+static void raise_revlog_error(void)
+{
+ PyObject *mod = NULL, *dict = NULL, *errclass = NULL;
+
+ mod = PyImport_ImportModule("mercurial.error");
+ if (mod == NULL) {
+ goto cleanup;
+ }
+
+ dict = PyModule_GetDict(mod);
+ if (dict == NULL) {
+ goto cleanup;
+ }
+ Py_INCREF(dict);
+
+ errclass = PyDict_GetItemString(dict, "RevlogError");
+ if (errclass == NULL) {
+ PyErr_SetString(PyExc_SystemError,
+ "could not find RevlogError");
+ goto cleanup;
+ }
+
+ /* value of exception is ignored by callers */
+ PyErr_SetString(errclass, "RevlogError");
+
+cleanup:
+ Py_XDECREF(dict);
+ Py_XDECREF(mod);
+}
+
+static PyObject *index_getitem(indexObject *self, PyObject *value)
+{
+ char *node;
+ Py_ssize_t nodelen;
+ int rev;
+
+ if (PyInt_Check(value))
+ return index_get(self, PyInt_AS_LONG(value));
+
+ if (node_check(value, &node, &nodelen) == -1)
+ return NULL;
+ rev = index_find_node(self, node, nodelen);
+ if (rev >= -1)
+ return PyInt_FromLong(rev);
+ if (rev == -2)
+ raise_revlog_error();
+ return NULL;
+}
+
+static int nt_partialmatch(indexObject *self, const char *node,
+ Py_ssize_t nodelen)
+{
+ int rev;
+
+ if (nt_init(self) == -1)
+ return -3;
+
+ if (self->ntrev > 0) {
+ /* ensure that the radix tree is fully populated */
+ for (rev = self->ntrev - 1; rev >= 0; rev--) {
+ const char *n = index_node(self, rev);
+ if (n == NULL)
+ return -2;
+ if (nt_insert(self, n, rev) == -1)
+ return -3;
+ }
+ self->ntrev = rev;
+ }
+
+ return nt_find(self, node, nodelen, 1);
+}
+
+static PyObject *index_partialmatch(indexObject *self, PyObject *args)
+{
+ const char *fullnode;
+ int nodelen;
+ char *node;
+ int rev, i;
+
+ if (!PyArg_ParseTuple(args, "s#", &node, &nodelen))
+ return NULL;
+
+ if (nodelen < 4) {
+ PyErr_SetString(PyExc_ValueError, "key too short");
+ return NULL;
+ }
+
+ if (nodelen > 40) {
+ PyErr_SetString(PyExc_ValueError, "key too long");
+ return NULL;
+ }
+
+ for (i = 0; i < nodelen; i++)
+ hexdigit(node, i);
+ if (PyErr_Occurred()) {
+ /* input contains non-hex characters */
+ PyErr_Clear();
+ Py_RETURN_NONE;
+ }
+
+ rev = nt_partialmatch(self, node, nodelen);
+
+ switch (rev) {
+ case -4:
+ raise_revlog_error();
+ case -3:
+ return NULL;
+ case -2:
+ Py_RETURN_NONE;
+ case -1:
+ return PyBytes_FromStringAndSize(nullid, 20);
+ }
+
+ fullnode = index_node(self, rev);
+ if (fullnode == NULL) {
+ PyErr_Format(PyExc_IndexError,
+ "could not access rev %d", rev);
+ return NULL;
+ }
+ return PyBytes_FromStringAndSize(fullnode, 20);
+}
+
+static PyObject *index_m_get(indexObject *self, PyObject *args)
+{
+ Py_ssize_t nodelen;
+ PyObject *val;
+ char *node;
+ int rev;
+
+ if (!PyArg_ParseTuple(args, "O", &val))
+ return NULL;
+ if (node_check(val, &node, &nodelen) == -1)
+ return NULL;
+ rev = index_find_node(self, node, nodelen);
+ if (rev == -3)
+ return NULL;
+ if (rev == -2)
+ Py_RETURN_NONE;
+ return PyInt_FromLong(rev);
+}
+
+static int index_contains(indexObject *self, PyObject *value)
+{
+ char *node;
+ Py_ssize_t nodelen;
+
+ if (PyInt_Check(value)) {
+ long rev = PyInt_AS_LONG(value);
+ return rev >= -1 && rev < index_length(self);
+ }
+
+ if (node_check(value, &node, &nodelen) == -1)
+ return -1;
+
+ switch (index_find_node(self, node, nodelen)) {
+ case -3:
+ return -1;
+ case -2:
+ return 0;
+ default:
+ return 1;
+ }
+}
+
+typedef uint64_t bitmask;
+
+/*
+ * Given a disjoint set of revs, return all candidates for the
+ * greatest common ancestor. In revset notation, this is the set
+ * "heads(::a and ::b and ...)"
+ */
+static PyObject *find_gca_candidates(indexObject *self, const int *revs,
+ int revcount)
+{
+ const bitmask allseen = (1ull << revcount) - 1;
+ const bitmask poison = 1ull << revcount;
+ PyObject *gca = PyList_New(0);
+ int i, v, interesting;
+ int maxrev = -1;
+ bitmask sp;
+ bitmask *seen;
+
+ if (gca == NULL)
+ return PyErr_NoMemory();
+
+ for (i = 0; i < revcount; i++) {
+ if (revs[i] > maxrev)
+ maxrev = revs[i];
+ }
+
+ seen = calloc(sizeof(*seen), maxrev + 1);
+ if (seen == NULL) {
+ Py_DECREF(gca);
+ return PyErr_NoMemory();
+ }
+
+ for (i = 0; i < revcount; i++)
+ seen[revs[i]] = 1ull << i;
+
+ interesting = revcount;
+
+ for (v = maxrev; v >= 0 && interesting; v--) {
+ bitmask sv = seen[v];
+ int parents[2];
+
+ if (!sv)
+ continue;
+
+ if (sv < poison) {
+ interesting -= 1;
+ if (sv == allseen) {
+ PyObject *obj = PyInt_FromLong(v);
+ if (obj == NULL)
+ goto bail;
+ if (PyList_Append(gca, obj) == -1) {
+ Py_DECREF(obj);
+ goto bail;
+ }
+ sv |= poison;
+ for (i = 0; i < revcount; i++) {
+ if (revs[i] == v)
+ goto done;
+ }
+ }
+ }
+ if (index_get_parents(self, v, parents, maxrev) < 0)
+ goto bail;
+
+ for (i = 0; i < 2; i++) {
+ int p = parents[i];
+ if (p == -1)
+ continue;
+ sp = seen[p];
+ if (sv < poison) {
+ if (sp == 0) {
+ seen[p] = sv;
+ interesting++;
+ }
+ else if (sp != sv)
+ seen[p] |= sv;
+ } else {
+ if (sp && sp < poison)
+ interesting--;
+ seen[p] = sv;
+ }
+ }
+ }
+
+done:
+ free(seen);
+ return gca;
+bail:
+ free(seen);
+ Py_XDECREF(gca);
+ return NULL;
+}
+
+/*
+ * Given a disjoint set of revs, return the subset with the longest
+ * path to the root.
+ */
+static PyObject *find_deepest(indexObject *self, PyObject *revs)
+{
+ const Py_ssize_t revcount = PyList_GET_SIZE(revs);
+ static const Py_ssize_t capacity = 24;
+ int *depth, *interesting = NULL;
+ int i, j, v, ninteresting;
+ PyObject *dict = NULL, *keys = NULL;
+ long *seen = NULL;
+ int maxrev = -1;
+ long final;
+
+ if (revcount > capacity) {
+ PyErr_Format(PyExc_OverflowError,
+ "bitset size (%ld) > capacity (%ld)",
+ (long)revcount, (long)capacity);
+ return NULL;
+ }
+
+ for (i = 0; i < revcount; i++) {
+ int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
+ if (n > maxrev)
+ maxrev = n;
+ }
+
+ depth = calloc(sizeof(*depth), maxrev + 1);
+ if (depth == NULL)
+ return PyErr_NoMemory();
+
+ seen = calloc(sizeof(*seen), maxrev + 1);
+ if (seen == NULL) {
+ PyErr_NoMemory();
+ goto bail;
+ }
+
+ interesting = calloc(sizeof(*interesting), 2 << revcount);
+ if (interesting == NULL) {
+ PyErr_NoMemory();
+ goto bail;
+ }
+
+ if (PyList_Sort(revs) == -1)
+ goto bail;
+
+ for (i = 0; i < revcount; i++) {
+ int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
+ long b = 1l << i;
+ depth[n] = 1;
+ seen[n] = b;
+ interesting[b] = 1;
+ }
+
+ ninteresting = (int)revcount;
+
+ for (v = maxrev; v >= 0 && ninteresting > 1; v--) {
+ int dv = depth[v];
+ int parents[2];
+ long sv;
+
+ if (dv == 0)
+ continue;
+
+ sv = seen[v];
+ if (index_get_parents(self, v, parents, maxrev) < 0)
+ goto bail;
+
+ for (i = 0; i < 2; i++) {
+ int p = parents[i];
+ long sp;
+ int dp;
+
+ if (p == -1)
+ continue;
+
+ dp = depth[p];
+ sp = seen[p];
+ if (dp <= dv) {
+ depth[p] = dv + 1;
+ if (sp != sv) {
+ interesting[sv] += 1;
+ seen[p] = sv;
+ if (sp) {
+ interesting[sp] -= 1;
+ if (interesting[sp] == 0)
+ ninteresting -= 1;
+ }
+ }
+ }
+ else if (dv == dp - 1) {
+ long nsp = sp | sv;
+ if (nsp == sp)
+ continue;
+ seen[p] = nsp;
+ interesting[sp] -= 1;
+ if (interesting[sp] == 0 && interesting[nsp] > 0)
+ ninteresting -= 1;
+ interesting[nsp] += 1;
+ }
+ }
+ interesting[sv] -= 1;
+ if (interesting[sv] == 0)
+ ninteresting -= 1;
+ }
+
+ final = 0;
+ j = ninteresting;
+ for (i = 0; i < (int)(2 << revcount) && j > 0; i++) {
+ if (interesting[i] == 0)
+ continue;
+ final |= i;
+ j -= 1;
+ }
+ if (final == 0) {
+ keys = PyList_New(0);
+ goto bail;
+ }
+
+ dict = PyDict_New();
+ if (dict == NULL)
+ goto bail;
+
+ for (i = 0; i < revcount; i++) {
+ PyObject *key;
+
+ if ((final & (1 << i)) == 0)
+ continue;
+
+ key = PyList_GET_ITEM(revs, i);
+ Py_INCREF(key);
+ Py_INCREF(Py_None);
+ if (PyDict_SetItem(dict, key, Py_None) == -1) {
+ Py_DECREF(key);
+ Py_DECREF(Py_None);
+ goto bail;
+ }
+ }
+
+ keys = PyDict_Keys(dict);
+
+bail:
+ free(depth);
+ free(seen);
+ free(interesting);
+ Py_XDECREF(dict);
+
+ return keys;
+}
+
+/*
+ * Given a (possibly overlapping) set of revs, return all the
+ * common ancestors heads: heads(::args[0] and ::args[1] and ...)
+ */
+static PyObject *index_commonancestorsheads(indexObject *self, PyObject *args)
+{
+ PyObject *ret = NULL;
+ Py_ssize_t argcount, i, len;
+ bitmask repeat = 0;
+ int revcount = 0;
+ int *revs;
+
+ argcount = PySequence_Length(args);
+ revs = PyMem_Malloc(argcount * sizeof(*revs));
+ if (argcount > 0 && revs == NULL)
+ return PyErr_NoMemory();
+ len = index_length(self) - 1;
+
+ for (i = 0; i < argcount; i++) {
+ static const int capacity = 24;
+ PyObject *obj = PySequence_GetItem(args, i);
+ bitmask x;
+ long val;
+
+ if (!PyInt_Check(obj)) {
+ PyErr_SetString(PyExc_TypeError,
+ "arguments must all be ints");
+ Py_DECREF(obj);
+ goto bail;
+ }
+ val = PyInt_AsLong(obj);
+ Py_DECREF(obj);
+ if (val == -1) {
+ ret = PyList_New(0);
+ goto done;
+ }
+ if (val < 0 || val >= len) {
+ PyErr_SetString(PyExc_IndexError,
+ "index out of range");
+ goto bail;
+ }
+ /* this cheesy bloom filter lets us avoid some more
+ * expensive duplicate checks in the common set-is-disjoint
+ * case */
+ x = 1ull << (val & 0x3f);
+ if (repeat & x) {
+ int k;
+ for (k = 0; k < revcount; k++) {
+ if (val == revs[k])
+ goto duplicate;
+ }
+ }
+ else repeat |= x;
+ if (revcount >= capacity) {
+ PyErr_Format(PyExc_OverflowError,
+ "bitset size (%d) > capacity (%d)",
+ revcount, capacity);
+ goto bail;
+ }
+ revs[revcount++] = (int)val;
+ duplicate:;
+ }
+
+ if (revcount == 0) {
+ ret = PyList_New(0);
+ goto done;
+ }
+ if (revcount == 1) {
+ PyObject *obj;
+ ret = PyList_New(1);
+ if (ret == NULL)
+ goto bail;
+ obj = PyInt_FromLong(revs[0]);
+ if (obj == NULL)
+ goto bail;
+ PyList_SET_ITEM(ret, 0, obj);
+ goto done;
+ }
+
+ ret = find_gca_candidates(self, revs, revcount);
+ if (ret == NULL)
+ goto bail;
+
+done:
+ PyMem_Free(revs);
+ return ret;
+
+bail:
+ PyMem_Free(revs);
+ Py_XDECREF(ret);
+ return NULL;
+}
+
+/*
+ * Given a (possibly overlapping) set of revs, return the greatest
+ * common ancestors: those with the longest path to the root.
+ */
+static PyObject *index_ancestors(indexObject *self, PyObject *args)
+{
+ PyObject *ret;
+ PyObject *gca = index_commonancestorsheads(self, args);
+ if (gca == NULL)
+ return NULL;
+
+ if (PyList_GET_SIZE(gca) <= 1) {
+ return gca;
+ }
+
+ ret = find_deepest(self, gca);
+ Py_DECREF(gca);
+ return ret;
+}
+
+/*
+ * Invalidate any trie entries introduced by added revs.
+ */
+static void nt_invalidate_added(indexObject *self, Py_ssize_t start)
+{
+ Py_ssize_t i, len = PyList_GET_SIZE(self->added);
+
+ for (i = start; i < len; i++) {
+ PyObject *tuple = PyList_GET_ITEM(self->added, i);
+ PyObject *node = PyTuple_GET_ITEM(tuple, 7);
+
+ nt_insert(self, PyBytes_AS_STRING(node), -1);
+ }
+
+ if (start == 0)
+ Py_CLEAR(self->added);
+}
+
+/*
+ * Delete a numeric range of revs, which must be at the end of the
+ * range, but exclude the sentinel nullid entry.
+ */
+static int index_slice_del(indexObject *self, PyObject *item)
+{
+ Py_ssize_t start, stop, step, slicelength;
+ Py_ssize_t length = index_length(self);
+ int ret = 0;
+
+/* Argument changed from PySliceObject* to PyObject* in Python 3. */
+#ifdef IS_PY3K
+ if (PySlice_GetIndicesEx(item, length,
+#else
+ if (PySlice_GetIndicesEx((PySliceObject*)item, length,
+#endif
+ &start, &stop, &step, &slicelength) < 0)
+ return -1;
+
+ if (slicelength <= 0)
+ return 0;
+
+ if ((step < 0 && start < stop) || (step > 0 && start > stop))
+ stop = start;
+
+ if (step < 0) {
+ stop = start + 1;
+ start = stop + step*(slicelength - 1) - 1;
+ step = -step;
+ }
+
+ if (step != 1) {
+ PyErr_SetString(PyExc_ValueError,
+ "revlog index delete requires step size of 1");
+ return -1;
+ }
+
+ if (stop != length - 1) {
+ PyErr_SetString(PyExc_IndexError,
+ "revlog index deletion indices are invalid");
+ return -1;
+ }
+
+ if (start < self->length - 1) {
+ if (self->nt) {
+ Py_ssize_t i;
+
+ for (i = start + 1; i < self->length - 1; i++) {
+ const char *node = index_node(self, i);
+
+ if (node)
+ nt_insert(self, node, -1);
+ }
+ if (self->added)
+ nt_invalidate_added(self, 0);
+ if (self->ntrev > start)
+ self->ntrev = (int)start;
+ }
+ self->length = start + 1;
+ if (start < self->raw_length) {
+ if (self->cache) {
+ Py_ssize_t i;
+ for (i = start; i < self->raw_length; i++)
+ Py_CLEAR(self->cache[i]);
+ }
+ self->raw_length = start;
+ }
+ goto done;
+ }
+
+ if (self->nt) {
+ nt_invalidate_added(self, start - self->length + 1);
+ if (self->ntrev > start)
+ self->ntrev = (int)start;
+ }
+ if (self->added)
+ ret = PyList_SetSlice(self->added, start - self->length + 1,
+ PyList_GET_SIZE(self->added), NULL);
+done:
+ Py_CLEAR(self->headrevs);
+ return ret;
+}
+
+/*
+ * Supported ops:
+ *
+ * slice deletion
+ * string assignment (extend node->rev mapping)
+ * string deletion (shrink node->rev mapping)
+ */
+static int index_assign_subscript(indexObject *self, PyObject *item,
+ PyObject *value)
+{
+ char *node;
+ Py_ssize_t nodelen;
+ long rev;
+
+ if (PySlice_Check(item) && value == NULL)
+ return index_slice_del(self, item);
+
+ if (node_check(item, &node, &nodelen) == -1)
+ return -1;
+
+ if (value == NULL)
+ return self->nt ? nt_insert(self, node, -1) : 0;
+ rev = PyInt_AsLong(value);
+ if (rev > INT_MAX || rev < 0) {
+ if (!PyErr_Occurred())
+ PyErr_SetString(PyExc_ValueError, "rev out of range");
+ return -1;
+ }
+
+ if (nt_init(self) == -1)
+ return -1;
+ return nt_insert(self, node, (int)rev);
+}
+
+/*
+ * Find all RevlogNG entries in an index that has inline data. Update
+ * the optional "offsets" table with those entries.
+ */
+static Py_ssize_t inline_scan(indexObject *self, const char **offsets)
+{
+ const char *data = (const char *)self->buf.buf;
+ Py_ssize_t pos = 0;
+ Py_ssize_t end = self->buf.len;
+ long incr = v1_hdrsize;
+ Py_ssize_t len = 0;
+
+ while (pos + v1_hdrsize <= end && pos >= 0) {
+ uint32_t comp_len;
+ /* 3rd element of header is length of compressed inline data */
+ comp_len = getbe32(data + pos + 8);
+ incr = v1_hdrsize + comp_len;
+ if (offsets)
+ offsets[len] = data + pos;
+ len++;
+ pos += incr;
+ }
+
+ if (pos != end) {
+ if (!PyErr_Occurred())
+ PyErr_SetString(PyExc_ValueError, "corrupt index file");
+ return -1;
+ }
+
+ return len;
+}
+
+static int index_init(indexObject *self, PyObject *args)
+{
+ PyObject *data_obj, *inlined_obj;
+ Py_ssize_t size;
+
+ /* Initialize before argument-checking to avoid index_dealloc() crash. */
+ self->raw_length = 0;
+ self->added = NULL;
+ self->cache = NULL;
+ self->data = NULL;
+ memset(&self->buf, 0, sizeof(self->buf));
+ self->headrevs = NULL;
+ self->filteredrevs = Py_None;
+ Py_INCREF(Py_None);
+ self->nt = NULL;
+ self->offsets = NULL;
+
+ if (!PyArg_ParseTuple(args, "OO", &data_obj, &inlined_obj))
+ return -1;
+ if (!PyObject_CheckBuffer(data_obj)) {
+ PyErr_SetString(PyExc_TypeError,
+ "data does not support buffer interface");
+ return -1;
+ }
+
+ if (PyObject_GetBuffer(data_obj, &self->buf, PyBUF_SIMPLE) == -1)
+ return -1;
+ size = self->buf.len;
+
+ self->inlined = inlined_obj && PyObject_IsTrue(inlined_obj);
+ self->data = data_obj;
+
+ self->ntlength = self->ntcapacity = 0;
+ self->ntdepth = self->ntsplits = 0;
+ self->ntlookups = self->ntmisses = 0;
+ self->ntrev = -1;
+ Py_INCREF(self->data);
+
+ if (self->inlined) {
+ Py_ssize_t len = inline_scan(self, NULL);
+ if (len == -1)
+ goto bail;
+ self->raw_length = len;
+ self->length = len + 1;
+ } else {
+ if (size % v1_hdrsize) {
+ PyErr_SetString(PyExc_ValueError, "corrupt index file");
+ goto bail;
+ }
+ self->raw_length = size / v1_hdrsize;
+ self->length = self->raw_length + 1;
+ }
+
+ return 0;
+bail:
+ return -1;
+}
+
+static PyObject *index_nodemap(indexObject *self)
+{
+ Py_INCREF(self);
+ return (PyObject *)self;
+}
+
+static void index_dealloc(indexObject *self)
+{
+ _index_clearcaches(self);
+ Py_XDECREF(self->filteredrevs);
+ if (self->buf.buf) {
+ PyBuffer_Release(&self->buf);
+ memset(&self->buf, 0, sizeof(self->buf));
+ }
+ Py_XDECREF(self->data);
+ Py_XDECREF(self->added);
+ PyObject_Del(self);
+}
+
+static PySequenceMethods index_sequence_methods = {
+ (lenfunc)index_length, /* sq_length */
+ 0, /* sq_concat */
+ 0, /* sq_repeat */
+ (ssizeargfunc)index_get, /* sq_item */
+ 0, /* sq_slice */
+ 0, /* sq_ass_item */
+ 0, /* sq_ass_slice */
+ (objobjproc)index_contains, /* sq_contains */
+};
+
+static PyMappingMethods index_mapping_methods = {
+ (lenfunc)index_length, /* mp_length */
+ (binaryfunc)index_getitem, /* mp_subscript */
+ (objobjargproc)index_assign_subscript, /* mp_ass_subscript */
+};
+
+static PyMethodDef index_methods[] = {
+ {"ancestors", (PyCFunction)index_ancestors, METH_VARARGS,
+ "return the gca set of the given revs"},
+ {"commonancestorsheads", (PyCFunction)index_commonancestorsheads,
+ METH_VARARGS,
+ "return the heads of the common ancestors of the given revs"},
+ {"clearcaches", (PyCFunction)index_clearcaches, METH_NOARGS,
+ "clear the index caches"},
+ {"get", (PyCFunction)index_m_get, METH_VARARGS,
+ "get an index entry"},
+ {"computephasesmapsets", (PyCFunction)compute_phases_map_sets,
+ METH_VARARGS, "compute phases"},
+ {"reachableroots2", (PyCFunction)reachableroots2, METH_VARARGS,
+ "reachableroots"},
+ {"headrevs", (PyCFunction)index_headrevs, METH_VARARGS,
+ "get head revisions"}, /* Can do filtering since 3.2 */
+ {"headrevsfiltered", (PyCFunction)index_headrevs, METH_VARARGS,
+ "get filtered head revisions"}, /* Can always do filtering */
+ {"insert", (PyCFunction)index_insert, METH_VARARGS,
+ "insert an index entry"},
+ {"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS,
+ "match a potentially ambiguous node ID"},
+ {"stats", (PyCFunction)index_stats, METH_NOARGS,
+ "stats for the index"},
+ {NULL} /* Sentinel */
+};
+
+static PyGetSetDef index_getset[] = {
+ {"nodemap", (getter)index_nodemap, NULL, "nodemap", NULL},
+ {NULL} /* Sentinel */
+};
+
+static PyTypeObject indexType = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "parsers.index", /* tp_name */
+ sizeof(indexObject), /* tp_basicsize */
+ 0, /* tp_itemsize */
+ (destructor)index_dealloc, /* tp_dealloc */
+ 0, /* tp_print */
+ 0, /* tp_getattr */
+ 0, /* tp_setattr */
+ 0, /* tp_compare */
+ 0, /* tp_repr */
+ 0, /* tp_as_number */
+ &index_sequence_methods, /* tp_as_sequence */
+ &index_mapping_methods, /* tp_as_mapping */
+ 0, /* tp_hash */
+ 0, /* tp_call */
+ 0, /* tp_str */
+ 0, /* tp_getattro */
+ 0, /* tp_setattro */
+ 0, /* tp_as_buffer */
+ Py_TPFLAGS_DEFAULT, /* tp_flags */
+ "revlog index", /* tp_doc */
+ 0, /* tp_traverse */
+ 0, /* tp_clear */
+ 0, /* tp_richcompare */
+ 0, /* tp_weaklistoffset */
+ 0, /* tp_iter */
+ 0, /* tp_iternext */
+ index_methods, /* tp_methods */
+ 0, /* tp_members */
+ index_getset, /* tp_getset */
+ 0, /* tp_base */
+ 0, /* tp_dict */
+ 0, /* tp_descr_get */
+ 0, /* tp_descr_set */
+ 0, /* tp_dictoffset */
+ (initproc)index_init, /* tp_init */
+ 0, /* tp_alloc */
+};
+
+/*
+ * returns a tuple of the form (index, cache) with elements as
+ * follows:
+ *
+ * index: an index object that lazily parses RevlogNG records
+ * cache: if data is inlined, a tuple (0, index_file_content), else None
+ * index_file_content could be a string, or a buffer
+ *
+ * added complications are for backwards compatibility
+ */
+PyObject *parse_index2(PyObject *self, PyObject *args)
+{
+ PyObject *tuple = NULL, *cache = NULL;
+ indexObject *idx;
+ int ret;
+
+ idx = PyObject_New(indexObject, &indexType);
+ if (idx == NULL)
+ goto bail;
+
+ ret = index_init(idx, args);
+ if (ret == -1)
+ goto bail;
+
+ if (idx->inlined) {
+ cache = Py_BuildValue("iO", 0, idx->data);
+ if (cache == NULL)
+ goto bail;
+ } else {
+ cache = Py_None;
+ Py_INCREF(cache);
+ }
+
+ tuple = Py_BuildValue("NN", idx, cache);
+ if (!tuple)
+ goto bail;
+ return tuple;
+
+bail:
+ Py_XDECREF(idx);
+ Py_XDECREF(cache);
+ Py_XDECREF(tuple);
+ return NULL;
+}
+
+void revlog_module_init(PyObject *mod)
+{
+ indexType.tp_new = PyType_GenericNew;
+ if (PyType_Ready(&indexType) < 0)
+ return;
+ Py_INCREF(&indexType);
+ PyModule_AddObject(mod, "index", (PyObject *)&indexType);
+
+ nullentry = Py_BuildValue("iiiiiiis#", 0, 0, 0,
+ -1, -1, -1, -1, nullid, 20);
+ if (nullentry)
+ PyObject_GC_UnTrack(nullentry);
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/util.h Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,76 @@
+/*
+ util.h - utility functions for interfacing with the various python APIs.
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+*/
+
+#ifndef _HG_UTIL_H_
+#define _HG_UTIL_H_
+
+#include "compat.h"
+
+#if PY_MAJOR_VERSION >= 3
+#define IS_PY3K
+#endif
+
+typedef struct {
+ PyObject_HEAD
+ char state;
+ int mode;
+ int size;
+ int mtime;
+} dirstateTupleObject;
+
+extern PyTypeObject dirstateTupleType;
+#define dirstate_tuple_check(op) (Py_TYPE(op) == &dirstateTupleType)
+
+/* This should be kept in sync with normcasespecs in encoding.py. */
+enum normcase_spec {
+ NORMCASE_LOWER = -1,
+ NORMCASE_UPPER = 1,
+ NORMCASE_OTHER = 0
+};
+
+#define MIN(a, b) (((a)<(b))?(a):(b))
+/* VC9 doesn't include bool and lacks stdbool.h based on my searching */
+#if defined(_MSC_VER) || __STDC_VERSION__ < 199901L
+#define true 1
+#define false 0
+typedef unsigned char bool;
+#else
+#include <stdbool.h>
+#endif
+
+static const int8_t hextable[256] = {
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1, /* 0-9 */
+ -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, /* A-F */
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, /* a-f */
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1
+};
+
+static inline int hexdigit(const char *p, Py_ssize_t off)
+{
+ int8_t val = hextable[(unsigned char)p[off]];
+
+ if (val >= 0) {
+ return val;
+ }
+
+ PyErr_SetString(PyExc_ValueError, "input contains non-hex character");
+ return 0;
+}
+
+#endif /* _HG_UTIL_H_ */
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cffi/base85.py Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,10 @@
+# base85.py: pure python base85 codec
+#
+# Copyright (C) 2009 Brendan Cully <brendan@kublai.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+from ..pure.base85 import *
--- a/mercurial/cffi/bdiff.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/cffi/bdiff.py Tue Jun 20 16:33:46 2017 -0400
@@ -1,31 +1,76 @@
+# bdiff.py - CFFI implementation of bdiff.c
+#
+# Copyright 2016 Maciej Fijalkowski <fijall@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
from __future__ import absolute_import
-import cffi
-import os
+import struct
-ffi = cffi.FFI()
-ffi.set_source("_bdiff_cffi",
- open(os.path.join(os.path.join(os.path.dirname(__file__), '..'),
- 'bdiff.c')).read(), include_dirs=['mercurial'])
-ffi.cdef("""
-struct bdiff_line {
- int hash, n, e;
- ssize_t len;
- const char *l;
-};
+from ..pure.bdiff import *
+from . import _bdiff
+
+ffi = _bdiff.ffi
+lib = _bdiff.lib
-struct bdiff_hunk;
-struct bdiff_hunk {
- int a1, a2, b1, b2;
- struct bdiff_hunk *next;
-};
+def blocks(sa, sb):
+ a = ffi.new("struct bdiff_line**")
+ b = ffi.new("struct bdiff_line**")
+ ac = ffi.new("char[]", str(sa))
+ bc = ffi.new("char[]", str(sb))
+ l = ffi.new("struct bdiff_hunk*")
+ try:
+ an = lib.bdiff_splitlines(ac, len(sa), a)
+ bn = lib.bdiff_splitlines(bc, len(sb), b)
+ if not a[0] or not b[0]:
+ raise MemoryError
+ count = lib.bdiff_diff(a[0], an, b[0], bn, l)
+ if count < 0:
+ raise MemoryError
+ rl = [None] * count
+ h = l.next
+ i = 0
+ while h:
+ rl[i] = (h.a1, h.a2, h.b1, h.b2)
+ h = h.next
+ i += 1
+ finally:
+ lib.free(a[0])
+ lib.free(b[0])
+ lib.bdiff_freehunks(l.next)
+ return rl
-int bdiff_splitlines(const char *a, ssize_t len, struct bdiff_line **lr);
-int bdiff_diff(struct bdiff_line *a, int an, struct bdiff_line *b, int bn,
- struct bdiff_hunk *base);
-void bdiff_freehunks(struct bdiff_hunk *l);
-void free(void*);
-""")
+def bdiff(sa, sb):
+ a = ffi.new("struct bdiff_line**")
+ b = ffi.new("struct bdiff_line**")
+ ac = ffi.new("char[]", str(sa))
+ bc = ffi.new("char[]", str(sb))
+ l = ffi.new("struct bdiff_hunk*")
+ try:
+ an = lib.bdiff_splitlines(ac, len(sa), a)
+ bn = lib.bdiff_splitlines(bc, len(sb), b)
+ if not a[0] or not b[0]:
+ raise MemoryError
+ count = lib.bdiff_diff(a[0], an, b[0], bn, l)
+ if count < 0:
+ raise MemoryError
+ rl = []
+ h = l.next
+ la = lb = 0
+ while h:
+ if h.a1 != la or h.b1 != lb:
+ lgt = (b[0] + h.b1).l - (b[0] + lb).l
+ rl.append(struct.pack(">lll", (a[0] + la).l - a[0].l,
+ (a[0] + h.a1).l - a[0].l, lgt))
+ rl.append(str(ffi.buffer((b[0] + lb).l, lgt)))
+ la = h.a2
+ lb = h.b2
+ h = h.next
-if __name__ == '__main__':
- ffi.compile()
+ finally:
+ lib.free(a[0])
+ lib.free(b[0])
+ lib.bdiff_freehunks(l.next)
+ return "".join(rl)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cffi/bdiffbuild.py Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,31 @@
+from __future__ import absolute_import
+
+import cffi
+import os
+
+ffi = cffi.FFI()
+ffi.set_source("mercurial.cffi._bdiff",
+ open(os.path.join(os.path.join(os.path.dirname(__file__), '..'),
+ 'bdiff.c')).read(), include_dirs=['mercurial'])
+ffi.cdef("""
+struct bdiff_line {
+ int hash, n, e;
+ ssize_t len;
+ const char *l;
+};
+
+struct bdiff_hunk;
+struct bdiff_hunk {
+ int a1, a2, b1, b2;
+ struct bdiff_hunk *next;
+};
+
+int bdiff_splitlines(const char *a, ssize_t len, struct bdiff_line **lr);
+int bdiff_diff(struct bdiff_line *a, int an, struct bdiff_line *b, int bn,
+ struct bdiff_hunk *base);
+void bdiff_freehunks(struct bdiff_hunk *l);
+void free(void*);
+""")
+
+if __name__ == '__main__':
+ ffi.compile()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cffi/diffhelpers.py Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,10 @@
+# diffhelpers.py - pure Python implementation of diffhelpers.c
+#
+# Copyright 2009 Matt Mackall <mpm@selenic.com> and others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+from ..pure.diffhelpers import *
--- a/mercurial/cffi/mpatch.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/cffi/mpatch.py Tue Jun 20 16:33:46 2017 -0400
@@ -1,35 +1,48 @@
+# mpatch.py - CFFI implementation of mpatch.c
+#
+# Copyright 2016 Maciej Fijalkowski <fijall@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
from __future__ import absolute_import
-import cffi
-import os
+from ..pure.mpatch import *
+from ..pure.mpatch import mpatchError # silence pyflakes
+from . import _mpatch
+
+ffi = _mpatch.ffi
+lib = _mpatch.lib
-ffi = cffi.FFI()
-mpatch_c = os.path.join(os.path.join(os.path.dirname(__file__), '..',
- 'mpatch.c'))
-ffi.set_source("_mpatch_cffi", open(mpatch_c).read(),
- include_dirs=["mercurial"])
-ffi.cdef("""
-
-struct mpatch_frag {
- int start, end, len;
- const char *data;
-};
+@ffi.def_extern()
+def cffi_get_next_item(arg, pos):
+ all, bins = ffi.from_handle(arg)
+ container = ffi.new("struct mpatch_flist*[1]")
+ to_pass = ffi.new("char[]", str(bins[pos]))
+ all.append(to_pass)
+ r = lib.mpatch_decode(to_pass, len(to_pass) - 1, container)
+ if r < 0:
+ return ffi.NULL
+ return container[0]
-struct mpatch_flist {
- struct mpatch_frag *base, *head, *tail;
-};
-
-extern "Python" struct mpatch_flist* cffi_get_next_item(void*, ssize_t);
-
-int mpatch_decode(const char *bin, ssize_t len, struct mpatch_flist** res);
-ssize_t mpatch_calcsize(size_t len, struct mpatch_flist *l);
-void mpatch_lfree(struct mpatch_flist *a);
-static int mpatch_apply(char *buf, const char *orig, size_t len,
- struct mpatch_flist *l);
-struct mpatch_flist *mpatch_fold(void *bins,
- struct mpatch_flist* (*get_next_item)(void*, ssize_t),
- ssize_t start, ssize_t end);
-""")
-
-if __name__ == '__main__':
- ffi.compile()
+def patches(text, bins):
+ lgt = len(bins)
+ all = []
+ if not lgt:
+ return text
+ arg = (all, bins)
+ patch = lib.mpatch_fold(ffi.new_handle(arg),
+ lib.cffi_get_next_item, 0, lgt)
+ if not patch:
+ raise mpatchError("cannot decode chunk")
+ outlen = lib.mpatch_calcsize(len(text), patch)
+ if outlen < 0:
+ lib.mpatch_lfree(patch)
+ raise mpatchError("inconsistency detected")
+ buf = ffi.new("char[]", outlen)
+ if lib.mpatch_apply(buf, text, len(text), patch) < 0:
+ lib.mpatch_lfree(patch)
+ raise mpatchError("error applying patches")
+ res = ffi.buffer(buf, outlen)[:]
+ lib.mpatch_lfree(patch)
+ return res
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cffi/mpatchbuild.py Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,35 @@
+from __future__ import absolute_import
+
+import cffi
+import os
+
+ffi = cffi.FFI()
+mpatch_c = os.path.join(os.path.join(os.path.dirname(__file__), '..',
+ 'mpatch.c'))
+ffi.set_source("mercurial.cffi._mpatch", open(mpatch_c).read(),
+ include_dirs=["mercurial"])
+ffi.cdef("""
+
+struct mpatch_frag {
+ int start, end, len;
+ const char *data;
+};
+
+struct mpatch_flist {
+ struct mpatch_frag *base, *head, *tail;
+};
+
+extern "Python" struct mpatch_flist* cffi_get_next_item(void*, ssize_t);
+
+int mpatch_decode(const char *bin, ssize_t len, struct mpatch_flist** res);
+ssize_t mpatch_calcsize(size_t len, struct mpatch_flist *l);
+void mpatch_lfree(struct mpatch_flist *a);
+static int mpatch_apply(char *buf, const char *orig, size_t len,
+ struct mpatch_flist *l);
+struct mpatch_flist *mpatch_fold(void *bins,
+ struct mpatch_flist* (*get_next_item)(void*, ssize_t),
+ ssize_t start, ssize_t end);
+""")
+
+if __name__ == '__main__':
+ ffi.compile()
--- a/mercurial/cffi/osutil.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/cffi/osutil.py Tue Jun 20 16:33:46 2017 -0400
@@ -1,102 +1,102 @@
+# osutil.py - CFFI version of osutil.c
+#
+# Copyright 2016 Maciej Fijalkowski <fijall@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
from __future__ import absolute_import
-import cffi
+import os
+import stat as statmod
-ffi = cffi.FFI()
-ffi.set_source("_osutil_cffi", """
-#include <sys/attr.h>
-#include <sys/vnode.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <time.h>
+from ..pure.osutil import *
+
+from .. import (
+ pycompat,
+)
-typedef struct val_attrs {
- uint32_t length;
- attribute_set_t returned;
- attrreference_t name_info;
- fsobj_type_t obj_type;
- struct timespec mtime;
- uint32_t accessmask;
- off_t datalength;
-} __attribute__((aligned(4), packed)) val_attrs_t;
-""", include_dirs=['mercurial'])
-ffi.cdef('''
+if pycompat.sysplatform == 'darwin':
+ from . import _osutil
+
+ ffi = _osutil.ffi
+ lib = _osutil.lib
+
+ listdir_batch_size = 4096
+ # tweakable number, only affects performance, which chunks
+ # of bytes do we get back from getattrlistbulk
-typedef uint32_t attrgroup_t;
+ attrkinds = [None] * 20 # we need the max no for enum VXXX, 20 is plenty
-typedef struct attrlist {
- uint16_t bitmapcount; /* number of attr. bit sets in list */
- uint16_t reserved; /* (to maintain 4-byte alignment) */
- attrgroup_t commonattr; /* common attribute group */
- attrgroup_t volattr; /* volume attribute group */
- attrgroup_t dirattr; /* directory attribute group */
- attrgroup_t fileattr; /* file attribute group */
- attrgroup_t forkattr; /* fork attribute group */
- ...;
-};
+ attrkinds[lib.VREG] = statmod.S_IFREG
+ attrkinds[lib.VDIR] = statmod.S_IFDIR
+ attrkinds[lib.VLNK] = statmod.S_IFLNK
+ attrkinds[lib.VBLK] = statmod.S_IFBLK
+ attrkinds[lib.VCHR] = statmod.S_IFCHR
+ attrkinds[lib.VFIFO] = statmod.S_IFIFO
+ attrkinds[lib.VSOCK] = statmod.S_IFSOCK
-typedef struct attribute_set {
- ...;
-} attribute_set_t;
+ class stat_res(object):
+ def __init__(self, st_mode, st_mtime, st_size):
+ self.st_mode = st_mode
+ self.st_mtime = st_mtime
+ self.st_size = st_size
-typedef struct attrreference {
- int attr_dataoffset;
- int attr_length;
- ...;
-} attrreference_t;
-
-typedef int ... off_t;
+ tv_sec_ofs = ffi.offsetof("struct timespec", "tv_sec")
+ buf = ffi.new("char[]", listdir_batch_size)
-typedef struct val_attrs {
- uint32_t length;
- attribute_set_t returned;
- attrreference_t name_info;
- uint32_t obj_type;
- struct timespec mtime;
- uint32_t accessmask;
- off_t datalength;
- ...;
-} val_attrs_t;
-
-/* the exact layout of the above struct will be figured out during build time */
-
-typedef int ... time_t;
-
-typedef struct timespec {
- time_t tv_sec;
- ...;
-};
-
-int getattrlist(const char* path, struct attrlist * attrList, void * attrBuf,
- size_t attrBufSize, unsigned int options);
-
-int getattrlistbulk(int dirfd, struct attrlist * attrList, void * attrBuf,
- size_t attrBufSize, uint64_t options);
+ def listdirinternal(dfd, req, stat, skip):
+ ret = []
+ while True:
+ r = lib.getattrlistbulk(dfd, req, buf, listdir_batch_size, 0)
+ if r == 0:
+ break
+ if r == -1:
+ raise OSError(ffi.errno, os.strerror(ffi.errno))
+ cur = ffi.cast("val_attrs_t*", buf)
+ for i in range(r):
+ lgt = cur.length
+ assert lgt == ffi.cast('uint32_t*', cur)[0]
+ ofs = cur.name_info.attr_dataoffset
+ str_lgt = cur.name_info.attr_length
+ base_ofs = ffi.offsetof('val_attrs_t', 'name_info')
+ name = str(ffi.buffer(ffi.cast("char*", cur) + base_ofs + ofs,
+ str_lgt - 1))
+ tp = attrkinds[cur.obj_type]
+ if name == "." or name == "..":
+ continue
+                if skip == name and tp == statmod.S_IFDIR:
+ return []
+ if stat:
+ mtime = cur.mtime.tv_sec
+ mode = (cur.accessmask & ~lib.S_IFMT)| tp
+ ret.append((name, tp, stat_res(st_mode=mode, st_mtime=mtime,
+ st_size=cur.datalength)))
+ else:
+ ret.append((name, tp))
+ cur = ffi.cast("val_attrs_t*", int(ffi.cast("intptr_t", cur))
+ + lgt)
+ return ret
-#define ATTR_BIT_MAP_COUNT ...
-#define ATTR_CMN_NAME ...
-#define ATTR_CMN_OBJTYPE ...
-#define ATTR_CMN_MODTIME ...
-#define ATTR_CMN_ACCESSMASK ...
-#define ATTR_CMN_ERROR ...
-#define ATTR_CMN_RETURNED_ATTRS ...
-#define ATTR_FILE_DATALENGTH ...
+ def listdir(path, stat=False, skip=None):
+ req = ffi.new("struct attrlist*")
+ req.bitmapcount = lib.ATTR_BIT_MAP_COUNT
+ req.commonattr = (lib.ATTR_CMN_RETURNED_ATTRS |
+ lib.ATTR_CMN_NAME |
+ lib.ATTR_CMN_OBJTYPE |
+ lib.ATTR_CMN_ACCESSMASK |
+ lib.ATTR_CMN_MODTIME)
+ req.fileattr = lib.ATTR_FILE_DATALENGTH
+ dfd = lib.open(path, lib.O_RDONLY, 0)
+ if dfd == -1:
+ raise OSError(ffi.errno, os.strerror(ffi.errno))
-#define VREG ...
-#define VDIR ...
-#define VLNK ...
-#define VBLK ...
-#define VCHR ...
-#define VFIFO ...
-#define VSOCK ...
-
-#define S_IFMT ...
-
-int open(const char *path, int oflag, int perm);
-int close(int);
-
-#define O_RDONLY ...
-''')
-
-if __name__ == '__main__':
- ffi.compile()
+ try:
+ ret = listdirinternal(dfd, req, stat, skip)
+ finally:
+ try:
+ lib.close(dfd)
+ except BaseException:
+ pass # we ignore all the errors from closing, not
+ # much we can do about that
+ return ret
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cffi/osutilbuild.py Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,102 @@
+from __future__ import absolute_import
+
+import cffi
+
+ffi = cffi.FFI()
+ffi.set_source("mercurial.cffi._osutil", """
+#include <sys/attr.h>
+#include <sys/vnode.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <time.h>
+
+typedef struct val_attrs {
+ uint32_t length;
+ attribute_set_t returned;
+ attrreference_t name_info;
+ fsobj_type_t obj_type;
+ struct timespec mtime;
+ uint32_t accessmask;
+ off_t datalength;
+} __attribute__((aligned(4), packed)) val_attrs_t;
+""", include_dirs=['mercurial'])
+ffi.cdef('''
+
+typedef uint32_t attrgroup_t;
+
+typedef struct attrlist {
+ uint16_t bitmapcount; /* number of attr. bit sets in list */
+ uint16_t reserved; /* (to maintain 4-byte alignment) */
+ attrgroup_t commonattr; /* common attribute group */
+ attrgroup_t volattr; /* volume attribute group */
+ attrgroup_t dirattr; /* directory attribute group */
+ attrgroup_t fileattr; /* file attribute group */
+ attrgroup_t forkattr; /* fork attribute group */
+ ...;
+};
+
+typedef struct attribute_set {
+ ...;
+} attribute_set_t;
+
+typedef struct attrreference {
+ int attr_dataoffset;
+ int attr_length;
+ ...;
+} attrreference_t;
+
+typedef int ... off_t;
+
+typedef struct val_attrs {
+ uint32_t length;
+ attribute_set_t returned;
+ attrreference_t name_info;
+ uint32_t obj_type;
+ struct timespec mtime;
+ uint32_t accessmask;
+ off_t datalength;
+ ...;
+} val_attrs_t;
+
+/* the exact layout of the above struct will be figured out during build time */
+
+typedef int ... time_t;
+
+typedef struct timespec {
+ time_t tv_sec;
+ ...;
+};
+
+int getattrlist(const char* path, struct attrlist * attrList, void * attrBuf,
+ size_t attrBufSize, unsigned int options);
+
+int getattrlistbulk(int dirfd, struct attrlist * attrList, void * attrBuf,
+ size_t attrBufSize, uint64_t options);
+
+#define ATTR_BIT_MAP_COUNT ...
+#define ATTR_CMN_NAME ...
+#define ATTR_CMN_OBJTYPE ...
+#define ATTR_CMN_MODTIME ...
+#define ATTR_CMN_ACCESSMASK ...
+#define ATTR_CMN_ERROR ...
+#define ATTR_CMN_RETURNED_ATTRS ...
+#define ATTR_FILE_DATALENGTH ...
+
+#define VREG ...
+#define VDIR ...
+#define VLNK ...
+#define VBLK ...
+#define VCHR ...
+#define VFIFO ...
+#define VSOCK ...
+
+#define S_IFMT ...
+
+int open(const char *path, int oflag, int perm);
+int close(int);
+
+#define O_RDONLY ...
+''')
+
+if __name__ == '__main__':
+ ffi.compile()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cffi/parsers.py Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,10 @@
+# parsers.py - Python implementation of parsers.c
+#
+# Copyright 2009 Matt Mackall <mpm@selenic.com> and others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+from ..pure.parsers import *
--- a/mercurial/changegroup.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/changegroup.py Tue Jun 20 16:33:46 2017 -0400
@@ -20,7 +20,6 @@
)
from . import (
- branchmap,
dagutil,
discovery,
error,
@@ -257,7 +256,7 @@
repo.ui.progress(_('manifests'), None)
self.callback = None
- def apply(self, repo, srctype, url, emptyok=False,
+ def apply(self, repo, tr, srctype, url, emptyok=False,
targetphase=phases.draft, expectedtotal=None):
"""Add the changegroup returned by source.read() to this repo.
srctype is a string like 'push', 'pull', or 'unbundle'. url is
@@ -280,169 +279,156 @@
changesets = files = revisions = 0
try:
- with repo.transaction("\n".join([srctype,
- util.hidepassword(url)])) as tr:
- # The transaction could have been created before and already
- # carries source information. In this case we use the top
- # level data. We overwrite the argument because we need to use
- # the top level value (if they exist) in this function.
- srctype = tr.hookargs.setdefault('source', srctype)
- url = tr.hookargs.setdefault('url', url)
- repo.hook('prechangegroup', throw=True, **tr.hookargs)
+ # The transaction may already carry source information. In this
+ # case we use the top level data. We overwrite the argument
+ # because we need to use the top level value (if they exist)
+ # in this function.
+ srctype = tr.hookargs.setdefault('source', srctype)
+ url = tr.hookargs.setdefault('url', url)
+ repo.hook('prechangegroup', throw=True, **tr.hookargs)
- # write changelog data to temp files so concurrent readers
- # will not see an inconsistent view
- cl = repo.changelog
- cl.delayupdate(tr)
- oldheads = set(cl.heads())
+ # write changelog data to temp files so concurrent readers
+ # will not see an inconsistent view
+ cl = repo.changelog
+ cl.delayupdate(tr)
+ oldheads = set(cl.heads())
- trp = weakref.proxy(tr)
- # pull off the changeset group
- repo.ui.status(_("adding changesets\n"))
- clstart = len(cl)
- class prog(object):
- def __init__(self, step, total):
- self._step = step
- self._total = total
- self._count = 1
- def __call__(self):
- repo.ui.progress(self._step, self._count,
- unit=_('chunks'), total=self._total)
- self._count += 1
- self.callback = prog(_('changesets'), expectedtotal)
+ trp = weakref.proxy(tr)
+ # pull off the changeset group
+ repo.ui.status(_("adding changesets\n"))
+ clstart = len(cl)
+ class prog(object):
+ def __init__(self, step, total):
+ self._step = step
+ self._total = total
+ self._count = 1
+ def __call__(self):
+ repo.ui.progress(self._step, self._count, unit=_('chunks'),
+ total=self._total)
+ self._count += 1
+ self.callback = prog(_('changesets'), expectedtotal)
- efiles = set()
- def onchangelog(cl, node):
- efiles.update(cl.readfiles(node))
+ efiles = set()
+ def onchangelog(cl, node):
+ efiles.update(cl.readfiles(node))
- self.changelogheader()
- srccontent = cl.addgroup(self, csmap, trp,
- addrevisioncb=onchangelog)
- efiles = len(efiles)
+ self.changelogheader()
+ cgnodes = cl.addgroup(self, csmap, trp, addrevisioncb=onchangelog)
+ efiles = len(efiles)
- if not (srccontent or emptyok):
- raise error.Abort(_("received changelog group is empty"))
- clend = len(cl)
- changesets = clend - clstart
- repo.ui.progress(_('changesets'), None)
- self.callback = None
+ if not (cgnodes or emptyok):
+ raise error.Abort(_("received changelog group is empty"))
+ clend = len(cl)
+ changesets = clend - clstart
+ repo.ui.progress(_('changesets'), None)
+ self.callback = None
- # pull off the manifest group
- repo.ui.status(_("adding manifests\n"))
- self._unpackmanifests(repo, revmap, trp, prog, changesets)
+ # pull off the manifest group
+ repo.ui.status(_("adding manifests\n"))
+ self._unpackmanifests(repo, revmap, trp, prog, changesets)
- needfiles = {}
- if repo.ui.configbool('server', 'validate', default=False):
- cl = repo.changelog
- ml = repo.manifestlog
- # validate incoming csets have their manifests
- for cset in xrange(clstart, clend):
- mfnode = cl.changelogrevision(cset).manifest
- mfest = ml[mfnode].readdelta()
- # store file nodes we must see
- for f, n in mfest.iteritems():
- needfiles.setdefault(f, set()).add(n)
+ needfiles = {}
+ if repo.ui.configbool('server', 'validate', default=False):
+ cl = repo.changelog
+ ml = repo.manifestlog
+ # validate incoming csets have their manifests
+ for cset in xrange(clstart, clend):
+ mfnode = cl.changelogrevision(cset).manifest
+ mfest = ml[mfnode].readdelta()
+            # store file nodes we must see
+ for f, n in mfest.iteritems():
+ needfiles.setdefault(f, set()).add(n)
- # process the files
- repo.ui.status(_("adding file changes\n"))
- newrevs, newfiles = _addchangegroupfiles(
- repo, self, revmap, trp, efiles, needfiles)
- revisions += newrevs
- files += newfiles
+ # process the files
+ repo.ui.status(_("adding file changes\n"))
+ newrevs, newfiles = _addchangegroupfiles(
+ repo, self, revmap, trp, efiles, needfiles)
+ revisions += newrevs
+ files += newfiles
- dh = 0
- if oldheads:
- heads = cl.heads()
- dh = len(heads) - len(oldheads)
- for h in heads:
- if h not in oldheads and repo[h].closesbranch():
- dh -= 1
- htext = ""
- if dh:
- htext = _(" (%+d heads)") % dh
+ deltaheads = 0
+ if oldheads:
+ heads = cl.heads()
+ deltaheads = len(heads) - len(oldheads)
+ for h in heads:
+ if h not in oldheads and repo[h].closesbranch():
+ deltaheads -= 1
+ htext = ""
+ if deltaheads:
+ htext = _(" (%+d heads)") % deltaheads
- repo.ui.status(_("added %d changesets"
- " with %d changes to %d files%s\n")
- % (changesets, revisions, files, htext))
- repo.invalidatevolatilesets()
+ repo.ui.status(_("added %d changesets"
+ " with %d changes to %d files%s\n")
+ % (changesets, revisions, files, htext))
+ repo.invalidatevolatilesets()
- if changesets > 0:
- if 'node' not in tr.hookargs:
- tr.hookargs['node'] = hex(cl.node(clstart))
- tr.hookargs['node_last'] = hex(cl.node(clend - 1))
- hookargs = dict(tr.hookargs)
- else:
- hookargs = dict(tr.hookargs)
- hookargs['node'] = hex(cl.node(clstart))
- hookargs['node_last'] = hex(cl.node(clend - 1))
- repo.hook('pretxnchangegroup', throw=True, **hookargs)
+ if changesets > 0:
+ if 'node' not in tr.hookargs:
+ tr.hookargs['node'] = hex(cl.node(clstart))
+ tr.hookargs['node_last'] = hex(cl.node(clend - 1))
+ hookargs = dict(tr.hookargs)
+ else:
+ hookargs = dict(tr.hookargs)
+ hookargs['node'] = hex(cl.node(clstart))
+ hookargs['node_last'] = hex(cl.node(clend - 1))
+ repo.hook('pretxnchangegroup', throw=True, **hookargs)
- added = [cl.node(r) for r in xrange(clstart, clend)]
- publishing = repo.publishing()
- if srctype in ('push', 'serve'):
- # Old servers can not push the boundary themselves.
- # New servers won't push the boundary if changeset already
- # exists locally as secret
- #
- # We should not use added here but the list of all change in
- # the bundle
- if publishing:
- phases.advanceboundary(repo, tr, phases.public,
- srccontent)
- else:
- # Those changesets have been pushed from the
- # outside, their phases are going to be pushed
- # alongside. Therefor `targetphase` is
- # ignored.
- phases.advanceboundary(repo, tr, phases.draft,
- srccontent)
- phases.retractboundary(repo, tr, phases.draft, added)
- elif srctype != 'strip':
- # publishing only alter behavior during push
- #
- # strip should not touch boundary at all
- phases.retractboundary(repo, tr, targetphase, added)
+ added = [cl.node(r) for r in xrange(clstart, clend)]
+ if srctype in ('push', 'serve'):
+ # Old servers can not push the boundary themselves.
+ # New servers won't push the boundary if changeset already
+ # exists locally as secret
+ #
+ # We should not use added here but the list of all change in
+ # the bundle
+ if repo.publishing():
+ phases.advanceboundary(repo, tr, phases.public, cgnodes)
+ else:
+ # Those changesets have been pushed from the
+ # outside, their phases are going to be pushed
+                # alongside. Therefore `targetphase` is
+ # ignored.
+ phases.advanceboundary(repo, tr, phases.draft, cgnodes)
+ phases.retractboundary(repo, tr, phases.draft, added)
+ elif srctype != 'strip':
+ # publishing only alter behavior during push
+ #
+ # strip should not touch boundary at all
+ phases.retractboundary(repo, tr, targetphase, added)
+
+ if changesets > 0:
- if changesets > 0:
- if srctype != 'strip':
- # During strip, branchcache is invalid but
- # coming call to `destroyed` will repair it.
- # In other case we can safely update cache on
- # disk.
- repo.ui.debug('updating the branch cache\n')
- branchmap.updatecache(repo.filtered('served'))
+ def runhooks():
+ # These hooks run when the lock releases, not when the
+ # transaction closes. So it's possible for the changelog
+ # to have changed since we last saw it.
+ if clstart >= len(repo):
+ return
- def runhooks():
- # These hooks run when the lock releases, not when the
- # transaction closes. So it's possible for the changelog
- # to have changed since we last saw it.
- if clstart >= len(repo):
- return
+ repo.hook("changegroup", **hookargs)
- repo.hook("changegroup", **hookargs)
-
- for n in added:
- args = hookargs.copy()
- args['node'] = hex(n)
- del args['node_last']
- repo.hook("incoming", **args)
+ for n in added:
+ args = hookargs.copy()
+ args['node'] = hex(n)
+ del args['node_last']
+ repo.hook("incoming", **args)
- newheads = [h for h in repo.heads()
- if h not in oldheads]
- repo.ui.log("incoming",
- "%s incoming changes - new heads: %s\n",
- len(added),
- ', '.join([hex(c[:6]) for c in newheads]))
+ newheads = [h for h in repo.heads()
+ if h not in oldheads]
+ repo.ui.log("incoming",
+ "%s incoming changes - new heads: %s\n",
+ len(added),
+ ', '.join([hex(c[:6]) for c in newheads]))
- tr.addpostclose('changegroup-runhooks-%020i' % clstart,
- lambda tr: repo._afterlock(runhooks))
+ tr.addpostclose('changegroup-runhooks-%020i' % clstart,
+ lambda tr: repo._afterlock(runhooks))
finally:
repo.ui.flush()
# never return 0 here:
- if dh < 0:
- return dh - 1
+ if deltaheads < 0:
+ return deltaheads - 1
else:
- return dh + 1
+ return deltaheads + 1
class cg2unpacker(cg1unpacker):
"""Unpacker for cg2 streams.
@@ -506,7 +492,9 @@
"""Given a source repo, construct a bundler.
bundlecaps is optional and can be used to specify the set of
- capabilities which can be used to build the bundle.
+ capabilities which can be used to build the bundle. While bundlecaps is
+ unused in core Mercurial, extensions rely on this feature to communicate
+ capabilities to customize the changegroup packer.
"""
# Set of capabilities we can use to build the bundle.
if bundlecaps is None:
@@ -974,8 +962,8 @@
bundler = getbundler(version, repo, bundlecaps)
return getsubsetraw(repo, outgoing, bundler, source)
-def getlocalchangegroup(repo, source, outgoing, bundlecaps=None,
- version='01'):
+def getchangegroup(repo, source, outgoing, bundlecaps=None,
+ version='01'):
"""Like getbundle, but taking a discovery.outgoing as an argument.
This is only implemented for local repos and reuses potentially
@@ -985,18 +973,10 @@
bundler = getbundler(version, repo, bundlecaps)
return getsubset(repo, outgoing, bundler, source)
-def getchangegroup(repo, source, outgoing, bundlecaps=None,
- version='01'):
- """Like changegroupsubset, but returns the set difference between the
- ancestors of heads and the ancestors common.
-
- If heads is None, use the local heads. If common is None, use [nullid].
-
- The nodes in common might not all be known locally due to the way the
- current discovery protocol works.
- """
- return getlocalchangegroup(repo, source, outgoing, bundlecaps=bundlecaps,
- version=version)
+def getlocalchangegroup(repo, *args, **kwargs):
+ repo.ui.deprecwarn('getlocalchangegroup is deprecated, use getchangegroup',
+ '4.3')
+ return getchangegroup(repo, *args, **kwargs)
def changegroup(repo, basenodes, source):
# to avoid a race we use changegroupsubset() (issue1320)
--- a/mercurial/changelog.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/changelog.py Tue Jun 20 16:33:46 2017 -0400
@@ -190,7 +190,7 @@
# The list of files may be empty. Which means nl3 is the first of the
# double newline that precedes the description.
- if text[nl3 + 1] == '\n':
+ if text[nl3 + 1:nl3 + 2] == '\n':
doublenl = nl3
else:
doublenl = text.index('\n\n', nl3 + 1)
@@ -258,12 +258,28 @@
return encoding.tolocal(self._text[self._offsets[3] + 2:])
class changelog(revlog.revlog):
- def __init__(self, opener):
- revlog.revlog.__init__(self, opener, "00changelog.i",
+ def __init__(self, opener, trypending=False):
+ """Load a changelog revlog using an opener.
+
+ If ``trypending`` is true, we attempt to load the index from a
+ ``00changelog.i.a`` file instead of the default ``00changelog.i``.
+ The ``00changelog.i.a`` file contains index (and possibly inline
+ revision) data for a transaction that hasn't been finalized yet.
+ It exists in a separate file to facilitate readers (such as
+ hooks processes) accessing data before a transaction is finalized.
+ """
+ if trypending and opener.exists('00changelog.i.a'):
+ indexfile = '00changelog.i.a'
+ else:
+ indexfile = '00changelog.i'
+
+ datafile = '00changelog.d'
+ revlog.revlog.__init__(self, opener, indexfile, datafile=datafile,
checkambig=True)
+
if self._initempty:
# changelogs don't benefit from generaldelta
- self.version &= ~revlog.REVLOGGENERALDELTA
+ self.version &= ~revlog.FLAG_GENERALDELTA
self._generaldelta = False
# Delta chains for changelogs tend to be very small because entries
@@ -401,27 +417,6 @@
# split when we're done
self.checkinlinesize(tr)
- def readpending(self, file):
- """read index data from a "pending" file
-
- During a transaction, the actual changeset data is already stored in the
- main file, but not yet finalized in the on-disk index. Instead, a
- "pending" index is written by the transaction logic. If this function
- is running, we are likely in a subprocess invoked in a hook. The
- subprocess is informed that it is within a transaction and needs to
- access its content.
-
- This function will read all the index data out of the pending file and
- overwrite the main index."""
-
- if not self.opener.exists(file):
- return # no pending data for changelog
- r = revlog.revlog(self.opener, file)
- self.index = r.index
- self.nodemap = r.nodemap
- self._nodecache = r._nodecache
- self._chunkcache = r._chunkcache
-
def _writepending(self, tr):
"create a file containing the unfinalized state for pretxnchangegroup"
if self._delaybuf:
@@ -535,3 +530,14 @@
just to access this is costly."""
extra = self.read(rev)[5]
return encoding.tolocal(extra.get("branch")), 'close' in extra
+
+ def _addrevision(self, node, rawtext, transaction, *args, **kwargs):
+ # overlay over the standard revlog._addrevision to track the new
+ # revision on the transaction.
+ rev = len(self)
+ node = super(changelog, self)._addrevision(node, rawtext, transaction,
+ *args, **kwargs)
+ revs = transaction.changes.get('revs')
+ if revs is not None:
+ revs.add(rev)
+ return node
--- a/mercurial/chgserver.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/chgserver.py Tue Jun 20 16:33:46 2017 -0400
@@ -44,6 +44,7 @@
import inspect
import os
import re
+import socket
import struct
import time
@@ -54,7 +55,6 @@
encoding,
error,
extensions,
- osutil,
pycompat,
util,
)
@@ -75,7 +75,8 @@
# sensitive environment variables affecting confighash
_envre = re.compile(r'''\A(?:
CHGHG
- |HG(?:[A-Z].*)?
+ |HG(?:DEMANDIMPORT|EMITWARNINGS|MODULEPOLICY|PROF|RCPATH)?
+ |HG(?:ENCODING|PLAIN).*
|LANG(?:UAGE)?
|LC_.*
|LD_.*
@@ -313,7 +314,7 @@
# tell client to sendmsg() with 1-byte payload, which makes it
# distinctive from "attachio\n" command consumed by client.read()
self.clientsock.sendall(struct.pack('>cI', 'I', 1))
- clientfds = osutil.recvfds(self.clientsock.fileno())
+ clientfds = util.recvfds(self.clientsock.fileno())
_log('received fds: %r\n' % clientfds)
ui = self.ui
@@ -458,12 +459,12 @@
'setenv': setenv,
'setumask': setumask})
- if util.safehasattr(osutil, 'setprocname'):
+ if util.safehasattr(util, 'setprocname'):
def setprocname(self):
"""Change process title"""
name = self._readstr()
_log('setprocname: %r\n' % name)
- osutil.setprocname(name)
+ util.setprocname(name)
capabilities['setprocname'] = setprocname
def _tempaddress(address):
@@ -492,6 +493,7 @@
self._checkextensions()
self._bind(sock)
self._createsymlink()
+ # no "listening at" message should be printed to simulate hg behavior
def _inithashstate(self, address):
self._baseaddress = address
@@ -517,6 +519,7 @@
tempaddress = _tempaddress(self._realaddress)
util.bindunixsocket(sock, tempaddress)
self._socketstat = os.stat(tempaddress)
+ sock.listen(socket.SOMAXCONN)
# rename will replace the old socket file if exists atomically. the
# old server will detect ownership change and exit.
util.rename(tempaddress, self._realaddress)
@@ -545,10 +548,6 @@
# the client will start a new server on demand.
util.tryunlink(self._realaddress)
- def printbanner(self, address):
- # no "listening at" message should be printed to simulate hg behavior
- pass
-
def shouldexit(self):
if not self._issocketowner():
self.ui.debug('%s is not owned, exiting.\n' % self._realaddress)
--- a/mercurial/cmdutil.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/cmdutil.py Tue Jun 20 16:33:46 2017 -0400
@@ -15,7 +15,6 @@
from .i18n import _
from .node import (
- bin,
hex,
nullid,
nullrev,
@@ -31,13 +30,13 @@
error,
formatter,
graphmod,
- lock as lockmod,
match as matchmod,
obsolete,
patch,
pathutil,
phases,
pycompat,
+ registrar,
repair,
revlog,
revset,
@@ -50,6 +49,113 @@
)
stringio = util.stringio
+# templates of common command options
+
+dryrunopts = [
+ ('n', 'dry-run', None,
+ _('do not perform actions, just print output')),
+]
+
+remoteopts = [
+ ('e', 'ssh', '',
+ _('specify ssh command to use'), _('CMD')),
+ ('', 'remotecmd', '',
+ _('specify hg command to run on the remote side'), _('CMD')),
+ ('', 'insecure', None,
+ _('do not verify server certificate (ignoring web.cacerts config)')),
+]
+
+walkopts = [
+ ('I', 'include', [],
+ _('include names matching the given patterns'), _('PATTERN')),
+ ('X', 'exclude', [],
+ _('exclude names matching the given patterns'), _('PATTERN')),
+]
+
+commitopts = [
+ ('m', 'message', '',
+ _('use text as commit message'), _('TEXT')),
+ ('l', 'logfile', '',
+ _('read commit message from file'), _('FILE')),
+]
+
+commitopts2 = [
+ ('d', 'date', '',
+ _('record the specified date as commit date'), _('DATE')),
+ ('u', 'user', '',
+ _('record the specified user as committer'), _('USER')),
+]
+
+# hidden for now
+formatteropts = [
+ ('T', 'template', '',
+ _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
+]
+
+templateopts = [
+ ('', 'style', '',
+ _('display using template map file (DEPRECATED)'), _('STYLE')),
+ ('T', 'template', '',
+ _('display with template'), _('TEMPLATE')),
+]
+
+logopts = [
+ ('p', 'patch', None, _('show patch')),
+ ('g', 'git', None, _('use git extended diff format')),
+ ('l', 'limit', '',
+ _('limit number of changes displayed'), _('NUM')),
+ ('M', 'no-merges', None, _('do not show merges')),
+ ('', 'stat', None, _('output diffstat-style summary of changes')),
+ ('G', 'graph', None, _("show the revision DAG")),
+] + templateopts
+
+diffopts = [
+ ('a', 'text', None, _('treat all files as text')),
+ ('g', 'git', None, _('use git extended diff format')),
+ ('', 'binary', None, _('generate binary diffs in git mode (default)')),
+ ('', 'nodates', None, _('omit dates from diff headers'))
+]
+
+diffwsopts = [
+ ('w', 'ignore-all-space', None,
+ _('ignore white space when comparing lines')),
+ ('b', 'ignore-space-change', None,
+ _('ignore changes in the amount of white space')),
+ ('B', 'ignore-blank-lines', None,
+ _('ignore changes whose lines are all blank')),
+]
+
+diffopts2 = [
+ ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
+ ('p', 'show-function', None, _('show which function each change is in')),
+ ('', 'reverse', None, _('produce a diff that undoes the changes')),
+] + diffwsopts + [
+ ('U', 'unified', '',
+ _('number of lines of context to show'), _('NUM')),
+ ('', 'stat', None, _('output diffstat-style summary of changes')),
+ ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
+]
+
+mergetoolopts = [
+ ('t', 'tool', '', _('specify merge tool')),
+]
+
+similarityopts = [
+ ('s', 'similarity', '',
+ _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
+]
+
+subrepoopts = [
+ ('S', 'subrepos', None,
+ _('recurse into subrepositories'))
+]
+
+debugrevlogopts = [
+ ('c', 'changelog', False, _('open changelog')),
+ ('m', 'manifest', False, _('open manifest')),
+ ('', 'dir', '', _('open directory manifest')),
+]
+
# special string such that everything below this line will be ingored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"
@@ -115,6 +221,7 @@
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
filterfn, *pats, **opts):
from . import merge as mergemod
+ opts = pycompat.byteskwargs(opts)
if not ui.interactive():
if cmdsuggest:
msg = _('running non-interactively, use %s instead') % cmdsuggest
@@ -340,12 +447,11 @@
return choice[cmd]
if len(choice) > 1:
- clist = choice.keys()
- clist.sort()
+ clist = sorted(choice)
raise error.AmbiguousCommand(cmd, clist)
if choice:
- return choice.values()[0]
+ return list(choice.values())[0]
raise error.UnknownCommand(cmd, allcmds)
@@ -385,7 +491,7 @@
'exclusive'))
if not message and logfile:
try:
- if logfile == '-':
+ if isstdiofilename(logfile):
message = ui.fin.read()
else:
message = '\n'.join(util.readfile(logfile).splitlines())
@@ -490,10 +596,10 @@
patlen = len(pat)
i = 0
while i < patlen:
- c = pat[i]
+ c = pat[i:i + 1]
if c == '%':
i += 1
- c = pat[i]
+ c = pat[i:i + 1]
c = expander[c]()
newname.append(c)
i += 1
@@ -502,6 +608,10 @@
raise error.Abort(_("invalid format spec '%%%s' in output filename") %
inst.args[0])
+def isstdiofilename(pat):
+ """True if the given pat looks like a filename denoting stdin/stdout"""
+ return not pat or pat == '-'
+
class _unclosablefile(object):
def __init__(self, fp):
self._fp = fp
@@ -527,16 +637,12 @@
writable = mode not in ('r', 'rb')
- if not pat or pat == '-':
+ if isstdiofilename(pat):
if writable:
fp = repo.ui.fout
else:
fp = repo.ui.fin
return _unclosablefile(fp)
- if util.safehasattr(pat, 'write') and writable:
- return pat
- if util.safehasattr(pat, 'read') and 'r' in mode:
- return pat
fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
if modemap is not None:
mode = modemap.get(fn, mode)
@@ -606,8 +712,8 @@
badstates = '?'
else:
badstates = '?r'
- m = scmutil.match(repo[None], [pat], opts, globbed=True)
- for abs in repo.walk(m):
+ m = scmutil.match(wctx, [pat], opts, globbed=True)
+ for abs in wctx.walk(m):
state = repo.dirstate[abs]
rel = m.rel(abs)
exact = m.exact(abs)
@@ -1001,11 +1107,13 @@
editor = None
else:
editor = getcommiteditor(editform='import.bypass')
- memctx = context.makememctx(repo, (p1.node(), p2.node()),
+ memctx = context.memctx(repo, (p1.node(), p2.node()),
message,
- user,
- date,
- branch, files, store,
+ files=files,
+ filectxfn=store,
+ user=user,
+ date=date,
+ branch=branch,
editor=editor)
n = memctx.commit()
finally:
@@ -1032,70 +1140,97 @@
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
-def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
+def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
+ node = scmutil.binnode(ctx)
+ parents = [p.node() for p in ctx.parents() if p]
+ branch = ctx.branch()
+ if switch_parent:
+ parents.reverse()
+
+ if parents:
+ prev = parents[0]
+ else:
+ prev = nullid
+
+ write("# HG changeset patch\n")
+ write("# User %s\n" % ctx.user())
+ write("# Date %d %d\n" % ctx.date())
+ write("# %s\n" % util.datestr(ctx.date()))
+ if branch and branch != 'default':
+ write("# Branch %s\n" % branch)
+ write("# Node ID %s\n" % hex(node))
+ write("# Parent %s\n" % hex(prev))
+ if len(parents) > 1:
+ write("# Parent %s\n" % hex(parents[1]))
+
+ for headerid in extraexport:
+ header = extraexportmap[headerid](seqno, ctx)
+ if header is not None:
+ write('# %s\n' % header)
+ write(ctx.description().rstrip())
+ write("\n\n")
+
+ for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
+ write(chunk, label=label)
+
+def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
opts=None, match=None):
- '''export changesets as hg patches.'''
+ '''export changesets as hg patches
+
+ Args:
+ repo: The repository from which we're exporting revisions.
+ revs: A list of revisions to export as revision numbers.
+ fntemplate: An optional string to use for generating patch file names.
+ fp: An optional file-like object to which patches should be written.
+ switch_parent: If True, show diffs against second parent when not nullid.
+ Default is false, which always shows diff against p1.
+ opts: diff options to use for generating the patch.
+ match: If specified, only export changes to files matching this matcher.
+
+ Returns:
+ Nothing.
+
+ Side Effect:
+ "HG Changeset Patch" data is emitted to one of the following
+ destinations:
+ fp is specified: All revs are written to the specified
+ file-like object.
+ fntemplate specified: Each rev is written to a unique file named using
+ the given template.
+ Neither fp nor template specified: All revs written to repo.ui.write()
+ '''
total = len(revs)
- revwidth = max([len(str(rev)) for rev in revs])
+ revwidth = max(len(str(rev)) for rev in revs)
filemode = {}
- def single(rev, seqno, fp):
+ write = None
+ dest = '<unnamed>'
+ if fp:
+ dest = getattr(fp, 'name', dest)
+ def write(s, **kw):
+ fp.write(s)
+ elif not fntemplate:
+ write = repo.ui.write
+
+ for seqno, rev in enumerate(revs, 1):
ctx = repo[rev]
- node = ctx.node()
- parents = [p.node() for p in ctx.parents() if p]
- branch = ctx.branch()
- if switch_parent:
- parents.reverse()
-
- if parents:
- prev = parents[0]
- else:
- prev = nullid
-
- shouldclose = False
- if not fp and len(template) > 0:
+ fo = None
+ if not fp and fntemplate:
desc_lines = ctx.description().rstrip().split('\n')
desc = desc_lines[0] #Commit always has a first line.
- fp = makefileobj(repo, template, node, desc=desc, total=total,
- seqno=seqno, revwidth=revwidth, mode='wb',
- modemap=filemode)
- shouldclose = True
- if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
- repo.ui.note("%s\n" % fp.name)
-
- if not fp:
- write = repo.ui.write
- else:
+ fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
+ total=total, seqno=seqno, revwidth=revwidth,
+ mode='wb', modemap=filemode)
+ dest = fo.name
def write(s, **kw):
- fp.write(s)
-
- write("# HG changeset patch\n")
- write("# User %s\n" % ctx.user())
- write("# Date %d %d\n" % ctx.date())
- write("# %s\n" % util.datestr(ctx.date()))
- if branch and branch != 'default':
- write("# Branch %s\n" % branch)
- write("# Node ID %s\n" % hex(node))
- write("# Parent %s\n" % hex(prev))
- if len(parents) > 1:
- write("# Parent %s\n" % hex(parents[1]))
-
- for headerid in extraexport:
- header = extraexportmap[headerid](seqno, ctx)
- if header is not None:
- write('# %s\n' % header)
- write(ctx.description().rstrip())
- write("\n\n")
-
- for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
- write(chunk, label=label)
-
- if shouldclose:
- fp.close()
-
- for seqno, rev in enumerate(revs):
- single(rev, seqno + 1, fp)
+ fo.write(s)
+ if not dest.startswith('<'):
+ repo.ui.note("%s\n" % dest)
+ _exportsingle(
+ repo, ctx, match, switch_parent, rev, seqno, write, opts)
+ if fo is not None:
+ fo.close()
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
changes=None, stat=False, fp=None, prefix='',
@@ -1214,7 +1349,7 @@
hexfunc = short
# as of now, wctx.node() and wctx.rev() return None, but we want to
# show the same values as {node} and {rev} templatekw
- revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
+ revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
if self.ui.quiet:
self.ui.write("%d:%s\n" % revnode, label='log.node')
@@ -1360,7 +1495,7 @@
if rev is None:
jrev = jnode = 'null'
else:
- jrev = str(rev)
+ jrev = '%d' % rev
jnode = '"%s"' % hex(ctx.node())
j = encoding.jsonescape
@@ -1442,17 +1577,10 @@
class changeset_templater(changeset_printer):
'''format changeset information.'''
- def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
+ def __init__(self, ui, repo, tmplspec, matchfn, diffopts, buffered):
changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
- assert not (tmpl and mapfile)
- defaulttempl = templatekw.defaulttempl
- if mapfile:
- self.t = templater.templater.frommapfile(mapfile,
- cache=defaulttempl)
- else:
- self.t = formatter.maketemplater(ui, 'changeset', tmpl,
- cache=defaulttempl)
-
+ self.t = formatter.loadtemplater(ui, tmplspec,
+ cache=templatekw.defaulttempl)
self._counter = itertools.count()
self.cache = {}
@@ -1464,7 +1592,9 @@
(self.ui.debugflag, 'debug'),
]
- self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
+ self._tref = tmplspec.ref
+ self._parts = {'header': '', 'footer': '',
+ tmplspec.ref: tmplspec.ref,
'docheader': '', 'docfooter': ''}
for mode, postfix in tmplmodes:
for t in self._parts:
@@ -1495,6 +1625,7 @@
props['index'] = next(self._counter)
props['revcache'] = {'copies': copies}
props['cache'] = self.cache
+ props = pycompat.strkwargs(props)
# write header
if self._parts['header']:
@@ -1507,7 +1638,7 @@
self.ui.write(h)
# write changeset metadata, then patch if requested
- key = self._parts['changeset']
+ key = self._parts[self._tref]
self.ui.write(templater.stringify(self.t(key, **props)))
self.showpatch(ctx, matchfn)
@@ -1516,16 +1647,23 @@
self.footer = templater.stringify(
self.t(self._parts['footer'], **props))
-def gettemplate(ui, tmpl, style):
- """
- Find the template matching the given template spec or style.
+def logtemplatespec(tmpl, mapfile):
+ if mapfile:
+ return formatter.templatespec('changeset', tmpl, mapfile)
+ else:
+ return formatter.templatespec('', tmpl, None)
+
+def _lookuplogtemplate(ui, tmpl, style):
+ """Find the template matching the given template spec or style
+
+ See formatter.lookuptemplate() for details.
"""
# ui settings
if not tmpl and not style: # template are stronger than style
tmpl = ui.config('ui', 'logtemplate')
if tmpl:
- return templater.unquotestring(tmpl), None
+ return logtemplatespec(templater.unquotestring(tmpl), None)
else:
style = util.expandpath(ui.config('ui', 'style', ''))
@@ -1536,13 +1674,19 @@
or templater.templatepath(mapfile))
if mapname:
mapfile = mapname
- return None, mapfile
+ return logtemplatespec(None, mapfile)
if not tmpl:
- return None, None
+ return logtemplatespec(None, None)
return formatter.lookuptemplate(ui, 'changeset', tmpl)
+def makelogtemplater(ui, repo, tmpl, buffered=False):
+ """Create a changeset_templater from a literal template 'tmpl'"""
+ spec = logtemplatespec(tmpl, None)
+ return changeset_templater(ui, repo, spec, matchfn=None, diffopts={},
+ buffered=buffered)
+
def show_changeset(ui, repo, opts, buffered=False):
"""show one changeset using template or regular display.
@@ -1562,12 +1706,12 @@
if opts.get('template') == 'json':
return jsonchangeset(ui, repo, matchfn, opts, buffered)
- tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
-
- if not tmpl and not mapfile:
+ spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
+
+ if not spec.ref and not spec.tmpl and not spec.mapfile:
return changeset_printer(ui, repo, matchfn, opts, buffered)
- return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered)
+ return changeset_templater(ui, repo, spec, matchfn, opts, buffered)
def showmarker(fm, marker, index=None):
"""utility function to display obsolescence marker in a readable way
@@ -1607,7 +1751,7 @@
if rev in results:
ui.status(_("found revision %s from %s\n") %
(rev, util.datestr(results[rev])))
- return str(rev)
+ return '%d' % rev
raise error.Abort(_("revision matching date not found"))
@@ -1691,7 +1835,7 @@
last = filelog.rev(node)
# keep track of all ancestors of the file
- ancestors = set([filelog.linkrev(last)])
+ ancestors = {filelog.linkrev(last)}
# iterate from latest to oldest revision
for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
@@ -2155,7 +2299,7 @@
return templatekw.showgraphnode # fast path for "{graphnode}"
spec = templater.unquotestring(spec)
- templ = formatter.gettemplater(ui, 'graphnode', spec)
+ templ = formatter.maketemplater(ui, spec)
cache = {}
if isinstance(displayer, changeset_templater):
cache = displayer.cache # reuse cache of slow templates
@@ -2167,7 +2311,7 @@
props['repo'] = repo
props['ui'] = repo.ui
props['revcache'] = {}
- return templater.stringify(templ('graphnode', **props))
+ return templ.render(props)
return formatnode
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
@@ -2278,7 +2422,7 @@
sub = wctx.sub(subpath)
try:
submatch = matchmod.subdirmatcher(subpath, match)
- if opts.get('subrepos'):
+ if opts.get(r'subrepos'):
bad.extend(sub.add(ui, submatch, prefix, False, **opts))
else:
bad.extend(sub.add(ui, submatch, prefix, True, **opts))
@@ -2286,7 +2430,7 @@
ui.status(_("skipping missing subrepository: %s\n")
% join(subpath))
- if not opts.get('dry_run'):
+ if not opts.get(r'dry_run'):
rejected = wctx.add(names, prefix)
bad.extend(f for f in rejected if f in match.files())
return bad
@@ -2308,7 +2452,7 @@
forgot = []
s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
- forget = sorted(s[0] + s[1] + s[3] + s[6])
+ forget = sorted(s.modified + s.added + s.deleted + s.clean)
if explicitonly:
forget = [f for f in forget if match.exact(f)]
@@ -2496,17 +2640,21 @@
return ret
-def cat(ui, repo, ctx, matcher, prefix, **opts):
+def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
err = 1
def write(path):
- fp = makefileobj(repo, opts.get('output'), ctx.node(),
- pathname=os.path.join(prefix, path))
- data = ctx[path].data()
- if opts.get('decode'):
- data = repo.wwritedata(path, data)
- fp.write(data)
- fp.close()
+ filename = None
+ if fntemplate:
+ filename = makefilename(repo, fntemplate, ctx.node(),
+ pathname=os.path.join(prefix, path))
+ with formatter.maybereopen(basefm, filename, opts) as fm:
+ data = ctx[path].data()
+ if opts.get('decode'):
+ data = repo.wwritedata(path, data)
+ fm.startitem()
+ fm.write('data', '%s', data)
+ fm.data(abspath=path, path=matcher.rel(path))
# Automation often uses hg cat on single files, so special case it
# for performance to avoid the cost of parsing the manifest.
@@ -2530,8 +2678,8 @@
try:
submatch = matchmod.subdirmatcher(subpath, matcher)
- if not sub.cat(submatch, os.path.join(prefix, sub._path),
- **opts):
+ if not sub.cat(submatch, basefm, fntemplate,
+ os.path.join(prefix, sub._path), **opts):
err = 0
except error.RepoLookupError:
ui.status(_("skipping missing subrepository: %s\n")
@@ -2581,10 +2729,8 @@
base = old.p1()
createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
- wlock = lock = newid = None
- try:
- wlock = repo.wlock()
- lock = repo.lock()
+ newid = None
+ with repo.wlock(), repo.lock():
with repo.transaction('amend') as tr:
# See if we got a message from -m or -l, if not, open the editor
# with the message of the changeset to amend
@@ -2739,7 +2885,7 @@
if node:
obs.append((ctx, ()))
- obsolete.createmarkers(repo, obs)
+ obsolete.createmarkers(repo, obs, operation='amend')
if not createmarkers and newid != old.node():
# Strip the intermediate commit (if there was one) and the amended
# commit
@@ -2747,8 +2893,6 @@
ui.note(_('stripping intermediate changeset %s\n') % ctx)
ui.note(_('stripping amended changeset %s\n') % old)
repair.strip(ui, repo, old.node(), topic='amend-backup')
- finally:
- lockmod.release(lock, wlock)
return newid
def commiteditor(repo, ctx, subs, editform=''):
@@ -2766,11 +2910,10 @@
forms.insert(0, 'changeset')
templatetext = None
while forms:
- tmpl = repo.ui.config('committemplate', '.'.join(forms))
- if tmpl:
- tmpl = templater.unquotestring(tmpl)
+ ref = '.'.join(forms)
+ if repo.ui.config('committemplate', ref):
templatetext = committext = buildcommittemplate(
- repo, ctx, subs, extramsg, tmpl)
+ repo, ctx, subs, extramsg, ref)
break
forms.pop()
else:
@@ -2808,15 +2951,12 @@
return text
-def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
+def buildcommittemplate(repo, ctx, subs, extramsg, ref):
ui = repo.ui
- tmpl, mapfile = gettemplate(ui, tmpl, None)
-
- t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
-
- for k, v in repo.ui.configitems('committemplate'):
- if k != 'changeset':
- t.t.cache[k] = v
+ spec = formatter.templatespec(ref, None, None)
+ t = changeset_templater(ui, repo, spec, None, {}, False)
+ t.t.cache.update((k, templater.unquotestring(v))
+ for k, v in repo.ui.configitems('committemplate'))
if not extramsg:
extramsg = '' # ensure that extramsg is string
@@ -2938,7 +3078,8 @@
targetsubs = sorted(s for s in wctx.substate if m(s))
if not m.always():
- for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
+ matcher = matchmod.badmatch(m, lambda x, y: False)
+ for abs in wctx.walk(matcher):
names[abs] = m.rel(abs), m.exact(abs)
# walk target manifest to fill `names`
@@ -3332,50 +3473,10 @@
if f in copied:
repo.dirstate.copy(copied[f], f)
-def command(table):
- """Returns a function object to be used as a decorator for making commands.
-
- This function receives a command table as its argument. The table should
- be a dict.
-
- The returned function can be used as a decorator for adding commands
- to that command table. This function accepts multiple arguments to define
- a command.
-
- The first argument is the command name.
-
- The options argument is an iterable of tuples defining command arguments.
- See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
-
- The synopsis argument defines a short, one line summary of how to use the
- command. This shows up in the help output.
-
- The norepo argument defines whether the command does not require a
- local repository. Most commands operate against a repository, thus the
- default is False.
-
- The optionalrepo argument defines whether the command optionally requires
- a local repository.
-
- The inferrepo argument defines whether to try to find a repository from the
- command line arguments. If True, arguments will be examined for potential
- repository locations. See ``findrepo()``. If a repository is found, it
- will be used.
- """
- def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
- inferrepo=False):
- def decorator(func):
- func.norepo = norepo
- func.optionalrepo = optionalrepo
- func.inferrepo = inferrepo
- if synopsis:
- table[name] = func, list(options), synopsis
- else:
- table[name] = func, list(options)
- return func
- return decorator
-
- return cmd
+class command(registrar.command):
+ def _doregister(self, func, name, *args, **kwargs):
+ func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
+ return super(command, self)._doregister(func, name, *args, **kwargs)
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
--- a/mercurial/color.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/color.py Tue Jun 20 16:33:46 2017 -0400
@@ -215,24 +215,34 @@
mode = ui.config('color', 'pagermode', mode)
realmode = mode
- if mode == 'auto':
- if pycompat.osname == 'nt':
- term = encoding.environ.get('TERM')
- # TERM won't be defined in a vanilla cmd.exe environment.
+ if pycompat.osname == 'nt':
+ from . import win32
+
+ term = encoding.environ.get('TERM')
+ # TERM won't be defined in a vanilla cmd.exe environment.
- # UNIX-like environments on Windows such as Cygwin and MSYS will
- # set TERM. They appear to make a best effort attempt at setting it
- # to something appropriate. However, not all environments with TERM
- # defined support ANSI. Since "ansi" could result in terminal
- # gibberish, we error on the side of selecting "win32". However, if
- # w32effects is not defined, we almost certainly don't support
- # "win32", so don't even try.
- if (term and 'xterm' in term) or not w32effects:
+ # UNIX-like environments on Windows such as Cygwin and MSYS will
+ # set TERM. They appear to make a best effort attempt at setting it
+ # to something appropriate. However, not all environments with TERM
+ # defined support ANSI.
+ ansienviron = term and 'xterm' in term
+
+ if mode == 'auto':
+ # Since "ansi" could result in terminal gibberish, we error on the
+ # side of selecting "win32". However, if w32effects is not defined,
+ # we almost certainly don't support "win32", so don't even try.
+ # w32effects is not populated when stdout is redirected, so checking
+ # it first avoids win32 calls in a state known to error out.
+ if ansienviron or not w32effects or win32.enablevtmode():
realmode = 'ansi'
else:
realmode = 'win32'
- else:
- realmode = 'ansi'
+ # An empty w32effects is a clue that stdout is redirected, and thus
+ # cannot enable VT mode.
+ elif mode == 'ansi' and w32effects and not ansienviron:
+ win32.enablevtmode()
+ elif mode == 'auto':
+ realmode = 'ansi'
def modewarn():
# only warn if color.mode was explicitly set and we're in
@@ -442,10 +452,10 @@
'inverse': _COMMON_LVB_REVERSE_VIDEO, # double-byte charsets only
}
- passthrough = set([_FOREGROUND_INTENSITY,
- _BACKGROUND_INTENSITY,
- _COMMON_LVB_UNDERSCORE,
- _COMMON_LVB_REVERSE_VIDEO])
+ passthrough = {_FOREGROUND_INTENSITY,
+ _BACKGROUND_INTENSITY,
+ _COMMON_LVB_UNDERSCORE,
+ _COMMON_LVB_REVERSE_VIDEO}
stdout = _kernel32.GetStdHandle(
_STD_OUTPUT_HANDLE) # don't close the handle returned
--- a/mercurial/commands.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/commands.py Tue Jun 20 16:33:46 2017 -0400
@@ -11,6 +11,7 @@
import errno
import os
import re
+import sys
from .i18n import _
from .node import (
@@ -26,6 +27,7 @@
changegroup,
cmdutil,
copies,
+ debugcommands as debugcommandsmod,
destutil,
dirstateguard,
discovery,
@@ -33,6 +35,7 @@
error,
exchange,
extensions,
+ formatter,
graphmod,
hbisect,
help,
@@ -44,6 +47,7 @@
phases,
pycompat,
rcutil,
+ registrar,
revsetlang,
scmutil,
server,
@@ -58,8 +62,9 @@
release = lockmod.release
table = {}
-
-command = cmdutil.command(table)
+table.update(debugcommandsmod.command._table)
+
+command = registrar.command(table)
# label constants
# until 3.5, bookmarks.current was the advertised name, not
@@ -103,108 +108,21 @@
_("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
]
-dryrunopts = [('n', 'dry-run', None,
- _('do not perform actions, just print output'))]
-
-remoteopts = [
- ('e', 'ssh', '',
- _('specify ssh command to use'), _('CMD')),
- ('', 'remotecmd', '',
- _('specify hg command to run on the remote side'), _('CMD')),
- ('', 'insecure', None,
- _('do not verify server certificate (ignoring web.cacerts config)')),
-]
-
-walkopts = [
- ('I', 'include', [],
- _('include names matching the given patterns'), _('PATTERN')),
- ('X', 'exclude', [],
- _('exclude names matching the given patterns'), _('PATTERN')),
-]
-
-commitopts = [
- ('m', 'message', '',
- _('use text as commit message'), _('TEXT')),
- ('l', 'logfile', '',
- _('read commit message from file'), _('FILE')),
-]
-
-commitopts2 = [
- ('d', 'date', '',
- _('record the specified date as commit date'), _('DATE')),
- ('u', 'user', '',
- _('record the specified user as committer'), _('USER')),
-]
-
-# hidden for now
-formatteropts = [
- ('T', 'template', '',
- _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
-]
-
-templateopts = [
- ('', 'style', '',
- _('display using template map file (DEPRECATED)'), _('STYLE')),
- ('T', 'template', '',
- _('display with template'), _('TEMPLATE')),
-]
-
-logopts = [
- ('p', 'patch', None, _('show patch')),
- ('g', 'git', None, _('use git extended diff format')),
- ('l', 'limit', '',
- _('limit number of changes displayed'), _('NUM')),
- ('M', 'no-merges', None, _('do not show merges')),
- ('', 'stat', None, _('output diffstat-style summary of changes')),
- ('G', 'graph', None, _("show the revision DAG")),
-] + templateopts
-
-diffopts = [
- ('a', 'text', None, _('treat all files as text')),
- ('g', 'git', None, _('use git extended diff format')),
- ('', 'binary', None, _('generate binary diffs in git mode (default)')),
- ('', 'nodates', None, _('omit dates from diff headers'))
-]
-
-diffwsopts = [
- ('w', 'ignore-all-space', None,
- _('ignore white space when comparing lines')),
- ('b', 'ignore-space-change', None,
- _('ignore changes in the amount of white space')),
- ('B', 'ignore-blank-lines', None,
- _('ignore changes whose lines are all blank')),
- ]
-
-diffopts2 = [
- ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
- ('p', 'show-function', None, _('show which function each change is in')),
- ('', 'reverse', None, _('produce a diff that undoes the changes')),
- ] + diffwsopts + [
- ('U', 'unified', '',
- _('number of lines of context to show'), _('NUM')),
- ('', 'stat', None, _('output diffstat-style summary of changes')),
- ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
-]
-
-mergetoolopts = [
- ('t', 'tool', '', _('specify merge tool')),
-]
-
-similarityopts = [
- ('s', 'similarity', '',
- _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
-]
-
-subrepoopts = [
- ('S', 'subrepos', None,
- _('recurse into subrepositories'))
-]
-
-debugrevlogopts = [
- ('c', 'changelog', False, _('open changelog')),
- ('m', 'manifest', False, _('open manifest')),
- ('', 'dir', '', _('open directory manifest')),
-]
+dryrunopts = cmdutil.dryrunopts
+remoteopts = cmdutil.remoteopts
+walkopts = cmdutil.walkopts
+commitopts = cmdutil.commitopts
+commitopts2 = cmdutil.commitopts2
+formatteropts = cmdutil.formatteropts
+templateopts = cmdutil.templateopts
+logopts = cmdutil.logopts
+diffopts = cmdutil.diffopts
+diffwsopts = cmdutil.diffwsopts
+diffopts2 = cmdutil.diffopts2
+mergetoolopts = cmdutil.mergetoolopts
+similarityopts = cmdutil.similarityopts
+subrepoopts = cmdutil.subrepoopts
+debugrevlogopts = cmdutil.debugrevlogopts
# Commands start here, listed alphabetically
@@ -255,7 +173,7 @@
Returns 0 if all files are successfully added.
"""
- m = scmutil.match(repo[None], pats, opts)
+ m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
rejected = cmdutil.add(ui, repo, m, "", False, **opts)
return rejected and 1 or 0
@@ -325,6 +243,7 @@
Returns 0 if all files are successfully added.
"""
+ opts = pycompat.byteskwargs(opts)
try:
sim = float(opts.get('similarity') or 100)
except ValueError:
@@ -345,7 +264,8 @@
('d', 'date', None, _('list the date (short with -q)')),
('n', 'number', None, _('list the revision number (default)')),
('c', 'changeset', None, _('list the changeset')),
- ('l', 'line-number', None, _('show line number at the first appearance'))
+ ('l', 'line-number', None, _('show line number at the first appearance')),
+ ('', 'skip', [], _('revision to not display (EXPERIMENTAL)'), _('REV')),
] + diffwsopts + walkopts + formatteropts,
_('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
inferrepo=True)
@@ -368,6 +288,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
if not pats:
raise error.Abort(_('at least one filename or pattern is required'))
@@ -378,7 +299,7 @@
ctx = scmutil.revsingle(repo, opts.get('rev'))
- fm = ui.formatter('annotate', opts)
+ rootfm = ui.formatter('annotate', opts)
if ui.quiet:
datefunc = util.shortdate
else:
@@ -388,7 +309,7 @@
if node is None:
return None
else:
- return fm.hexfunc(node)
+ return rootfm.hexfunc(node)
if opts.get('changeset'):
# omit "+" suffix which is appended to node hex
def formatrev(rev):
@@ -404,11 +325,11 @@
return '%d ' % rev
def formathex(hex):
if hex is None:
- return '%s+' % fm.hexfunc(ctx.p1().node())
+ return '%s+' % rootfm.hexfunc(ctx.p1().node())
else:
return '%s ' % hex
else:
- hexfn = fm.hexfunc
+ hexfn = rootfm.hexfunc
formatrev = formathex = str
opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
@@ -430,7 +351,7 @@
ui.pager('annotate')
- if fm.isplain():
+ if rootfm.isplain():
def makefunc(get, fmt):
return lambda x: fmt(get(x))
else:
@@ -450,15 +371,24 @@
follow = not opts.get('no_follow')
diffopts = patch.difffeatureopts(ui, opts, section='annotate',
whitespace=True)
+ skiprevs = opts.get('skip')
+ if skiprevs:
+ skiprevs = scmutil.revrange(repo, skiprevs)
+
for abs in ctx.walk(m):
fctx = ctx[abs]
- if not opts.get('text') and util.binary(fctx.data()):
- fm.plain(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
+ rootfm.startitem()
+ rootfm.data(abspath=abs, path=m.rel(abs))
+ if not opts.get('text') and fctx.isbinary():
+ rootfm.plain(_("%s: binary file\n")
+ % ((pats and m.rel(abs)) or abs))
continue
+ fm = rootfm.nested('lines')
lines = fctx.annotate(follow=follow, linenumber=linenumber,
- diffopts=diffopts)
+ skiprevs=skiprevs, diffopts=diffopts)
if not lines:
+ fm.end()
continue
formats = []
pieces = []
@@ -480,8 +410,9 @@
if not lines[-1][1].endswith('\n'):
fm.plain('\n')
-
- fm.end()
+ fm.end()
+
+ rootfm.end()
@command('archive',
[('', 'no-decode', None, _('do not pass files through decoders')),
@@ -532,6 +463,7 @@
Returns 0 on success.
'''
+ opts = pycompat.byteskwargs(opts)
ctx = scmutil.revsingle(repo, opts.get('rev'))
if not ctx:
raise error.Abort(_('no working directory: please specify a revision'))
@@ -627,6 +559,7 @@
release(lock, wlock)
def _dobackout(ui, repo, node=None, rev=None, **opts):
+ opts = pycompat.byteskwargs(opts)
if opts.get('commit') and opts.get('no_commit'):
raise error.Abort(_("cannot use --commit with --no-commit"))
if opts.get('merge') and opts.get('no_commit'):
@@ -702,7 +635,8 @@
def commitfunc(ui, repo, message, match, opts):
editform = 'backout'
- e = cmdutil.getcommiteditor(editform=editform, **opts)
+ e = cmdutil.getcommiteditor(editform=editform,
+ **pycompat.strkwargs(opts))
if not message:
# we don't translate commit messages
message = "Backed out changeset %s" % short(node)
@@ -834,10 +768,23 @@
bad = True
else:
reset = True
- elif extra or good + bad + skip + reset + extend + bool(command) > 1:
+ elif extra:
raise error.Abort(_('incompatible arguments'))
- cmdutil.checkunfinished(repo)
+ incompatibles = {
+ '--bad': bad,
+ '--command': bool(command),
+ '--extend': extend,
+ '--good': good,
+ '--reset': reset,
+ '--skip': skip,
+ }
+
+ enabled = [x for x in incompatibles if incompatibles[x]]
+
+ if len(enabled) > 1:
+ raise error.Abort(_('%s and %s are incompatible') %
+ tuple(sorted(enabled)[0:2]))
if reset:
hbisect.resetstate(repo)
@@ -865,6 +812,7 @@
"""common used update sequence"""
if noupdate:
return
+ cmdutil.checkunfinished(repo)
cmdutil.bailifchanged(repo)
return hg.clean(repo, node, show_stats=show_stats)
@@ -1002,6 +950,7 @@
hg book -f @
'''
+ opts = pycompat.byteskwargs(opts)
force = opts.get('force')
rev = opts.get('rev')
delete = opts.get('delete')
@@ -1047,6 +996,16 @@
and not force):
raise error.Abort(
_("a bookmark cannot have the name of an existing branch"))
+ if len(mark) > 3 and not force:
+ try:
+ shadowhash = (mark in repo)
+ except error.LookupError: # ambiguous identifier
+ shadowhash = False
+ if shadowhash:
+ repo.ui.warn(
+ _("bookmark %s matches a changeset hash\n"
+ "(did you leave a -r out of an 'hg bookmark' command?)\n")
+ % mark)
if delete and rename:
raise error.Abort(_("--delete and --rename are incompatible"))
@@ -1178,6 +1137,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
if label:
label = label.strip()
@@ -1226,6 +1186,7 @@
Returns 0.
"""
+ opts = pycompat.byteskwargs(opts)
ui.pager('branches')
fm = ui.formatter('branches', opts)
hexfunc = fm.hexfunc
@@ -1309,6 +1270,7 @@
Returns 0 on success, 1 if no changes found.
"""
+ opts = pycompat.byteskwargs(opts)
revs = None
if 'rev' in opts:
revstrings = opts['rev']
@@ -1339,8 +1301,6 @@
base = ['null']
else:
base = scmutil.revrange(repo, opts.get('base'))
- # TODO: get desired bundlecaps from command line.
- bundlecaps = None
if cgversion not in changegroup.supportedoutgoingversions(repo):
raise error.Abort(_("repository does not support bundle version %s") %
cgversion)
@@ -1352,10 +1312,6 @@
common = [repo.lookup(rev) for rev in base]
heads = revs and map(repo.lookup, revs) or None
outgoing = discovery.outgoing(repo, common, heads)
- cg = changegroup.getchangegroup(repo, 'bundle', outgoing,
- bundlecaps=bundlecaps,
- version=cgversion)
- outgoing = None
else:
dest = ui.expandpath(dest or 'default-push', dest or 'default')
dest, branches = hg.parseurl(dest, opts.get('branch'))
@@ -1366,10 +1322,9 @@
onlyheads=heads,
force=opts.get('force'),
portable=True)
- cg = changegroup.getlocalchangegroup(repo, 'bundle', outgoing,
- bundlecaps, version=cgversion)
- if not cg:
- scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
+
+ if not outgoing.missing:
+ scmutil.nochangesfound(ui, repo, not base and outgoing.excluded)
return 1
if cgversion == '01': #bundle1
@@ -1392,15 +1347,20 @@
if complevel is not None:
compopts['level'] = complevel
- bundle2.writebundle(ui, cg, fname, bversion, compression=bcompression,
- compopts=compopts)
+
+ contentopts = {'cg.version': cgversion}
+ if repo.ui.configbool('experimental', 'evolution.bundle-obsmarker', False):
+ contentopts['obsolescence'] = True
+ bundle2.writenewbundle(ui, repo, 'bundle', fname, bversion, outgoing,
+ contentopts, compression=bcompression,
+ compopts=compopts)
@command('cat',
[('o', 'output', '',
_('print output to file with formatted name'), _('FORMAT')),
('r', 'rev', '', _('print the given revision'), _('REV')),
('', 'decode', None, _('apply any matching decode filter')),
- ] + walkopts,
+ ] + walkopts + formatteropts,
_('[OPTION]... FILE...'),
inferrepo=True)
def cat(ui, repo, file1, *pats, **opts):
@@ -1426,9 +1386,17 @@
"""
ctx = scmutil.revsingle(repo, opts.get('rev'))
m = scmutil.match(ctx, (file1,) + pats, opts)
-
- ui.pager('cat')
- return cmdutil.cat(ui, repo, ctx, m, '', **opts)
+ fntemplate = opts.pop('output', '')
+ if cmdutil.isstdiofilename(fntemplate):
+ fntemplate = ''
+
+ if fntemplate:
+ fm = formatter.nullformatter(ui, 'cat')
+ else:
+ ui.pager('cat')
+ fm = ui.formatter('cat', opts)
+ with fm:
+ return cmdutil.cat(ui, repo, ctx, m, fm, fntemplate, '', **opts)
@command('^clone',
[('U', 'noupdate', None, _('the clone will include an empty working '
@@ -1549,6 +1517,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
if opts.get('noupdate') and opts.get('updaterev'):
raise error.Abort(_("cannot specify both --noupdate and --updaterev"))
@@ -1639,16 +1608,16 @@
release(lock, wlock)
def _docommit(ui, repo, *pats, **opts):
- opts = pycompat.byteskwargs(opts)
- if opts.get('interactive'):
- opts.pop('interactive')
+ if opts.get(r'interactive'):
+ opts.pop(r'interactive')
ret = cmdutil.dorecord(ui, repo, commit, None, False,
cmdutil.recordfilter, *pats,
- **pycompat.strkwargs(opts))
+ **opts)
# ret can be 0 (no changes to record) or the value returned by
# commit(), 1 if nothing changed or None on success.
return 1 if ret == 0 else ret
+ opts = pycompat.byteskwargs(opts)
if opts.get('subrepos'):
if opts.get('amend'):
raise error.Abort(_('cannot amend with --subrepos'))
@@ -1769,6 +1738,7 @@
"""
+ opts = pycompat.byteskwargs(opts)
if opts.get('edit') or opts.get('local') or opts.get('global'):
if opts.get('local') and opts.get('global'):
raise error.Abort(_("can't use --local and --global together"))
@@ -1871,9 +1841,46 @@
Returns 0 on success, 1 if errors are encountered.
"""
+ opts = pycompat.byteskwargs(opts)
with repo.wlock(False):
return cmdutil.copy(ui, repo, pats, opts)
+@command('debugcommands', [], _('[COMMAND]'), norepo=True)
+def debugcommands(ui, cmd='', *args):
+ """list all available commands and options"""
+ for cmd, vals in sorted(table.iteritems()):
+ cmd = cmd.split('|')[0].strip('^')
+ opts = ', '.join([i[1] for i in vals[1]])
+ ui.write('%s: %s\n' % (cmd, opts))
+
+@command('debugcomplete',
+ [('o', 'options', None, _('show the command options'))],
+ _('[-o] CMD'),
+ norepo=True)
+def debugcomplete(ui, cmd='', **opts):
+ """returns the completion list associated with the given command"""
+
+ if opts.get('options'):
+ options = []
+ otables = [globalopts]
+ if cmd:
+ aliases, entry = cmdutil.findcmd(cmd, table, False)
+ otables.append(entry[1])
+ for t in otables:
+ for o in t:
+ if "(DEPRECATED)" in o[3]:
+ continue
+ if o[0]:
+ options.append('-%s' % o[0])
+ options.append('--%s' % o[1])
+ ui.write("%s\n" % "\n".join(options))
+ return
+
+ cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
+ if ui.verbose:
+ cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
+ ui.write("%s\n" % "\n".join(sorted(cmdlist)))
+
@command('^diff',
[('r', 'rev', [], _('revision'), _('REV')),
('c', 'change', '', _('change made by revision'), _('REV'))
@@ -1938,6 +1945,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
revs = opts.get('rev')
change = opts.get('change')
stat = opts.get('stat')
@@ -2041,7 +2049,7 @@
else:
ui.note(_('exporting patch:\n'))
ui.pager('export')
- cmdutil.export(repo, revs, template=opts.get('output'),
+ cmdutil.export(repo, revs, fntemplate=opts.get('output'),
switch_parent=opts.get('switch_parent'),
opts=patch.diffallopts(ui, opts))
@@ -2094,7 +2102,9 @@
Returns 0 if a match is found, 1 otherwise.
"""
- ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
+
+ opts = pycompat.byteskwargs(opts)
+ ctx = scmutil.revsingle(repo, opts.get('rev'), None)
end = '\n'
if opts.get('print0'):
@@ -2136,6 +2146,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
if not pats:
raise error.Abort(_('no files specified'))
@@ -2220,6 +2231,7 @@
return _dograft(ui, repo, *revs, **opts)
def _dograft(ui, repo, *revs, **opts):
+ opts = pycompat.byteskwargs(opts)
if revs and opts.get('rev'):
ui.warn(_('warning: inconsistent use of --rev might give unexpected '
'revision ordering!\n'))
@@ -2232,7 +2244,8 @@
if not opts.get('date') and opts.get('currentdate'):
opts['date'] = "%d %d" % util.makedate()
- editor = cmdutil.getcommiteditor(editform='graft', **opts)
+ editor = cmdutil.getcommiteditor(editform='graft',
+ **pycompat.strkwargs(opts))
cont = False
if opts.get('continue'):
@@ -2439,6 +2452,7 @@
Returns 0 if a match is found, 1 otherwise.
"""
+ opts = pycompat.byteskwargs(opts)
reflags = re.M
if opts.get('ignore_case'):
reflags |= re.I
@@ -2685,6 +2699,7 @@
Returns 0 if matching heads are found, 1 if not.
"""
+ opts = pycompat.byteskwargs(opts)
start = None
if 'rev' in opts:
start = scmutil.revsingle(repo, opts['rev'], None).node()
@@ -2743,7 +2758,7 @@
Returns 0 if successful.
"""
- keep = opts.get('system') or []
+ keep = opts.get(r'system') or []
if len(keep) == 0:
if pycompat.sysplatform.startswith('win'):
keep.append('windows')
@@ -2757,7 +2772,8 @@
if ui.verbose:
keep.append('verbose')
- formatted = help.formattedhelp(ui, name, keep=keep, **opts)
+ commands = sys.modules[__name__]
+ formatted = help.formattedhelp(ui, commands, name, keep=keep, **opts)
ui.pager('help')
ui.write(formatted)
@@ -2810,6 +2826,7 @@
Returns 0 if successful.
"""
+ opts = pycompat.byteskwargs(opts)
if not repo and not source:
raise error.Abort(_("there is no Mercurial repository here "
"(.hg not found)"))
@@ -2878,12 +2895,13 @@
('+'.join([hexfunc(p.node()) for p in parents]), changed)]
if num:
output.append("%s%s" %
- ('+'.join([str(p.rev()) for p in parents]), changed))
+ ('+'.join([pycompat.bytestr(p.rev()) for p in parents]),
+ changed))
else:
if default or id:
output = [hexfunc(ctx.node())]
if num:
- output.append(str(ctx.rev()))
+ output.append(pycompat.bytestr(ctx.rev()))
taglist = ctx.tags()
if default and not ui.quiet:
@@ -3033,6 +3051,7 @@
Returns 0 on success, 1 on partial success (see --partial).
"""
+ opts = pycompat.byteskwargs(opts)
if not patch1:
raise error.Abort(_('need at least one patch to import'))
@@ -3237,6 +3256,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
hg.peer(ui, opts, ui.expandpath(dest), create=True)
@command('locate',
@@ -3267,6 +3287,7 @@
Returns 0 if a match is found, 1 otherwise.
"""
+ opts = pycompat.byteskwargs(opts)
if opts.get('print0'):
end = '\0'
else:
@@ -3473,6 +3494,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
fm = ui.formatter('manifest', opts)
if opts.get('all'):
@@ -3551,6 +3573,7 @@
Returns 0 on success, 1 if there are unresolved files.
"""
+ opts = pycompat.byteskwargs(opts)
if opts.get('rev') and node:
raise error.Abort(_("please specify just one revision"))
if not node:
@@ -3630,6 +3653,7 @@
Returns 0 if there are outgoing changes, 1 otherwise.
"""
+ opts = pycompat.byteskwargs(opts)
if opts.get('graph'):
cmdutil.checkunsupportedgraphflags([], opts)
o, other = hg._outgoing(ui, repo, dest, opts)
@@ -3687,6 +3711,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
ctx = scmutil.revsingle(repo, opts.get('rev'), None)
if file_:
@@ -3749,6 +3774,8 @@
Returns 0 on success.
"""
+
+ opts = pycompat.byteskwargs(opts)
ui.pager('paths')
if search:
pathitems = [(name, path) for name, path in ui.paths.iteritems()
@@ -3811,6 +3838,7 @@
(For more information about the phases concept, see :hg:`help phases`.)
"""
+ opts = pycompat.byteskwargs(opts)
# search for a unique phase argument
targetphase = None
for idx, name in enumerate(phases.phasenames):
@@ -3943,6 +3971,7 @@
Returns 0 on success, 1 if an update had unresolved files.
"""
+ opts = pycompat.byteskwargs(opts)
if ui.configbool('commands', 'update.requiredest') and opts.get('update'):
msg = _('update destination required by configuration')
hint = _('use hg pull followed by hg update DEST')
@@ -4073,6 +4102,7 @@
Returns 0 if push was successful, 1 if nothing to push.
"""
+ opts = pycompat.byteskwargs(opts)
if opts.get('bookmark'):
ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
for b in opts['bookmark']:
@@ -4198,6 +4228,7 @@
Returns 0 on success, 1 if any warnings encountered.
"""
+ opts = pycompat.byteskwargs(opts)
after, force = opts.get('after'), opts.get('force')
if not pats and not after:
raise error.Abort(_('no files specified'))
@@ -4227,6 +4258,7 @@
Returns 0 on success, 1 if errors are encountered.
"""
+ opts = pycompat.byteskwargs(opts)
with repo.wlock(False):
return cmdutil.copy(ui, repo, pats, opts, rename=True)
@@ -4281,6 +4313,7 @@
Returns 0 on success, 1 if any files fail a resolve attempt.
"""
+ opts = pycompat.byteskwargs(opts)
flaglist = 'all mark unmark list no_status'.split()
all, mark, unmark, show, nostatus = \
[opts.get(o) for o in flaglist]
@@ -4586,8 +4619,8 @@
if not ui.configbool('ui', 'rollback', True):
raise error.Abort(_('rollback is disabled because it is unsafe'),
hint=('see `hg help -v rollback` for information'))
- return repo.rollback(dryrun=opts.get('dry_run'),
- force=opts.get('force'))
+ return repo.rollback(dryrun=opts.get(r'dry_run'),
+ force=opts.get(r'force'))
@command('root', [])
def root(ui, repo):
@@ -4652,6 +4685,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
if opts["stdio"] and opts["cmdserver"]:
raise error.Abort(_("cannot use --stdio with --cmdserver"))
@@ -4817,6 +4851,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
ui.pager('summary')
ctx = repo[None]
parents = ctx.parents()
@@ -5125,6 +5160,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
wlock = lock = None
try:
wlock = repo.wlock()
@@ -5189,7 +5225,8 @@
editform = 'tag.remove'
else:
editform = 'tag.add'
- editor = cmdutil.getcommiteditor(editform=editform, **opts)
+ editor = cmdutil.getcommiteditor(editform=editform,
+ **pycompat.strkwargs(opts))
# don't allow tagging the null rev
if (not opts.get('remove') and
@@ -5212,6 +5249,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
ui.pager('tags')
fm = ui.formatter('tags', opts)
hexfunc = fm.hexfunc
@@ -5256,6 +5294,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
displayer = cmdutil.show_changeset(ui, repo, opts)
displayer.show(repo['tip'])
displayer.close()
@@ -5277,33 +5316,33 @@
for fname in fnames:
f = hg.openpath(ui, fname)
gen = exchange.readbundle(ui, f, fname)
- if isinstance(gen, bundle2.unbundle20):
- tr = repo.transaction('unbundle')
- try:
- op = bundle2.applybundle(repo, gen, tr, source='unbundle',
- url='bundle:' + fname)
- tr.close()
- except error.BundleUnknownFeatureError as exc:
- raise error.Abort(_('%s: unknown bundle feature, %s')
- % (fname, exc),
- hint=_("see https://mercurial-scm.org/"
- "wiki/BundleFeature for more "
- "information"))
- finally:
- if tr:
- tr.release()
- changes = [r.get('return', 0)
- for r in op.records['changegroup']]
- modheads = changegroup.combineresults(changes)
- elif isinstance(gen, streamclone.streamcloneapplier):
+ if isinstance(gen, streamclone.streamcloneapplier):
raise error.Abort(
_('packed bundles cannot be applied with '
'"hg unbundle"'),
hint=_('use "hg debugapplystreamclonebundle"'))
+ url = 'bundle:' + fname
+ if isinstance(gen, bundle2.unbundle20):
+ with repo.transaction('unbundle') as tr:
+ try:
+ op = bundle2.applybundle(repo, gen, tr,
+ source='unbundle',
+ url=url)
+ except error.BundleUnknownFeatureError as exc:
+ raise error.Abort(
+ _('%s: unknown bundle feature, %s') % (fname, exc),
+ hint=_("see https://mercurial-scm.org/"
+ "wiki/BundleFeature for more "
+ "information"))
+ changes = [r.get('return', 0)
+ for r in op.records['changegroup']]
+ modheads = changegroup.combineresults(changes)
else:
- modheads = gen.apply(repo, 'unbundle', 'bundle:' + fname)
-
- return postincoming(ui, repo, modheads, opts.get('update'), None, None)
+ txnname = 'unbundle\n%s' % util.hidepassword(url)
+ with repo.transaction(txnname) as tr:
+ modheads = gen.apply(repo, tr, 'unbundle', url)
+
+ return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
@command('^update|up|checkout|co',
[('C', 'clean', None, _('discard uncommitted changes (no backup)')),
@@ -5430,6 +5469,7 @@
@command('version', [] + formatteropts, norepo=True)
def version_(ui, **opts):
"""output version and copyright information"""
+ opts = pycompat.byteskwargs(opts)
if ui.verbose:
ui.pager('version')
fm = ui.formatter("version", opts)
--- a/mercurial/commandserver.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/commandserver.py Tue Jun 20 16:33:46 2017 -0400
@@ -409,14 +409,13 @@
def bindsocket(self, sock, address):
util.bindunixsocket(sock, address)
+ sock.listen(socket.SOMAXCONN)
+ self.ui.status(_('listening at %s\n') % address)
+ self.ui.flush() # avoid buffering of status message
def unlinksocket(self, address):
os.unlink(address)
- def printbanner(self, address):
- self.ui.status(_('listening at %s\n') % address)
- self.ui.flush() # avoid buffering of status message
-
def shouldexit(self):
"""True if server should shut down; checked per pollinterval"""
return False
@@ -452,10 +451,8 @@
def init(self):
self._sock = socket.socket(socket.AF_UNIX)
self._servicehandler.bindsocket(self._sock, self.address)
- self._sock.listen(socket.SOMAXCONN)
o = signal.signal(signal.SIGCHLD, self._sigchldhandler)
self._oldsigchldhandler = o
- self._servicehandler.printbanner(self.address)
self._socketunlinked = False
def _unlinksocket(self):
--- a/mercurial/config.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/config.py Tue Jun 20 16:33:46 2017 -0400
@@ -68,7 +68,7 @@
def sections(self):
return sorted(self._data.keys())
def items(self, section):
- return self._data.get(section, {}).items()
+ return list(self._data.get(section, {}).iteritems())
def set(self, section, item, value, source=""):
if pycompat.ispy3:
assert not isinstance(value, str), (
--- a/mercurial/context.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/context.py Tue Jun 20 16:33:46 2017 -0400
@@ -23,6 +23,7 @@
short,
wdirid,
wdirnodes,
+ wdirrev,
)
from . import (
encoding,
@@ -77,7 +78,7 @@
return self.rev()
def __repr__(self):
- return "<%s %s>" % (type(self).__name__, str(self))
+ return r"<%s %s>" % (type(self).__name__, str(self))
def __eq__(self, other):
try:
@@ -257,13 +258,13 @@
return changectx(self._repo, nullrev)
def _fileinfo(self, path):
- if '_manifest' in self.__dict__:
+ if r'_manifest' in self.__dict__:
try:
return self._manifest[path], self._manifest.flags(path)
except KeyError:
raise error.ManifestLookupError(self._node, path,
_('not found in manifest'))
- if '_manifestdelta' in self.__dict__ or path in self.files():
+ if r'_manifestdelta' in self.__dict__ or path in self.files():
if path in self._manifestdelta:
return (self._manifestdelta[path],
self._manifestdelta.flags(path))
@@ -300,8 +301,6 @@
def match(self, pats=None, include=None, exclude=None, default='glob',
listsubrepos=False, badfn=None):
- if pats is None:
- pats = []
r = self._repo
return matchmod.match(r.root, r.getcwd(), pats,
include, exclude, default,
@@ -323,9 +322,6 @@
def hasdir(self, dir):
return self._manifest.hasdir(dir)
- def dirty(self, missing=False, merge=True, branch=True):
- return False
-
def status(self, other=None, match=None, listignored=False,
listclean=False, listunknown=False, listsubrepos=False):
"""return status of files between two nodes or node and working
@@ -389,24 +385,6 @@
return r
-
-def makememctx(repo, parents, text, user, date, branch, files, store,
- editor=None, extra=None):
- def getfilectx(repo, memctx, path):
- data, mode, copied = store.getfile(path)
- if data is None:
- return None
- islink, isexec = mode
- return memfilectx(repo, path, data, islink=islink, isexec=isexec,
- copied=copied, memctx=memctx)
- if extra is None:
- extra = {}
- if branch:
- extra['branch'] = encoding.fromlocal(branch)
- ctx = memctx(repo, parents, text, files, getfilectx, user,
- date, extra, editor)
- return ctx
-
def _filterederror(repo, changeid):
"""build an exception to be raised about a filtered changeid
@@ -474,7 +452,7 @@
l = len(repo.changelog)
if r < 0:
r += l
- if r < 0 or r >= l:
+ if r < 0 or r >= l and r != wdirrev:
raise ValueError
self._rev = r
self._node = repo.changelog.node(r)
@@ -551,7 +529,7 @@
def _manifest(self):
return self._manifestctx.read()
- @propertycache
+ @property
def _manifestctx(self):
return self._repo.manifestlog[self._changeset.manifest]
@@ -687,21 +665,20 @@
in the repo,
workingfilectx: a filecontext that represents files from the working
directory,
- memfilectx: a filecontext that represents files in-memory."""
- def __new__(cls, repo, path, *args, **kwargs):
- return super(basefilectx, cls).__new__(cls)
-
+ memfilectx: a filecontext that represents files in-memory,
+ overlayfilectx: duplicate another filecontext with some fields overridden.
+ """
@propertycache
def _filelog(self):
return self._repo.file(self._path)
@propertycache
def _changeid(self):
- if '_changeid' in self.__dict__:
+ if r'_changeid' in self.__dict__:
return self._changeid
- elif '_changectx' in self.__dict__:
+ elif r'_changectx' in self.__dict__:
return self._changectx.rev()
- elif '_descendantrev' in self.__dict__:
+ elif r'_descendantrev' in self.__dict__:
# this file context was created from a revision with a known
# descendant, we can (lazily) correct for linkrev aliases
return self._adjustlinkrev(self._descendantrev)
@@ -710,7 +687,7 @@
@propertycache
def _filenode(self):
- if '_fileid' in self.__dict__:
+ if r'_fileid' in self.__dict__:
return self._filelog.lookup(self._fileid)
else:
return self._changectx.filenode(self._path)
@@ -762,8 +739,11 @@
return self._filerev
def filenode(self):
return self._filenode
+ @propertycache
+ def _flags(self):
+ return self._changectx.flags(self._path)
def flags(self):
- return self._changectx.flags(self._path)
+ return self._flags
def filelog(self):
return self._filelog
def rev(self):
@@ -794,8 +774,12 @@
return self._changectx.manifest()
def changectx(self):
return self._changectx
+ def renamed(self):
+ return self._copied
def repo(self):
return self._repo
+ def size(self):
+ return len(self.data())
def path(self):
return self._path
@@ -943,7 +927,8 @@
return p[1]
return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
- def annotate(self, follow=False, linenumber=False, diffopts=None):
+ def annotate(self, follow=False, linenumber=False, skiprevs=None,
+ diffopts=None):
'''returns a list of tuples of ((ctx, number), line) for each line
in the file, where ctx is the filectx of the node where
that line was last changed; if linenumber parameter is true, number is
@@ -963,15 +948,6 @@
def decorate(text, rev):
return ([(rev, False)] * lines(text), text)
- def pair(parent, child):
- blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts)
- for (a1, a2, b1, b2), t in blocks:
- # Changed blocks ('!') or blocks made only of blank lines ('~')
- # belong to the child.
- if t == '=':
- child[0][b1:b2] = parent[0][a1:a2]
- return child
-
getlog = util.lrucachefunc(lambda x: self._repo.file(x))
def parents(f):
@@ -1047,8 +1023,12 @@
if ready:
visit.pop()
curr = decorate(f.data(), f)
+ skipchild = False
+ if skiprevs is not None:
+ skipchild = f._changeid in skiprevs
+ curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
+ diffopts)
for p in pl:
- curr = pair(hist[p], curr)
if needed[p] == 1:
del hist[p]
del needed[p]
@@ -1076,6 +1056,116 @@
c = visit.pop(max(visit))
yield c
+def _annotatepair(parents, childfctx, child, skipchild, diffopts):
+ r'''
+ Given parent and child fctxes and annotate data for parents, for all lines
+ in either parent that match the child, annotate the child with the parent's
+ data.
+
+ Additionally, if `skipchild` is True, replace all other lines with parent
+ annotate data as well such that child is never blamed for any lines.
+
+ >>> oldfctx = 'old'
+ >>> p1fctx, p2fctx, childfctx = 'p1', 'p2', 'c'
+ >>> olddata = 'a\nb\n'
+ >>> p1data = 'a\nb\nc\n'
+ >>> p2data = 'a\nc\nd\n'
+ >>> childdata = 'a\nb2\nc\nc2\nd\n'
+ >>> diffopts = mdiff.diffopts()
+
+ >>> def decorate(text, rev):
+ ... return ([(rev, i) for i in xrange(1, text.count('\n') + 1)], text)
+
+ Basic usage:
+
+ >>> oldann = decorate(olddata, oldfctx)
+ >>> p1ann = decorate(p1data, p1fctx)
+ >>> p1ann = _annotatepair([oldann], p1fctx, p1ann, False, diffopts)
+ >>> p1ann[0]
+ [('old', 1), ('old', 2), ('p1', 3)]
+ >>> p2ann = decorate(p2data, p2fctx)
+ >>> p2ann = _annotatepair([oldann], p2fctx, p2ann, False, diffopts)
+ >>> p2ann[0]
+ [('old', 1), ('p2', 2), ('p2', 3)]
+
+ Test with multiple parents (note the difference caused by ordering):
+
+ >>> childann = decorate(childdata, childfctx)
+ >>> childann = _annotatepair([p1ann, p2ann], childfctx, childann, False,
+ ... diffopts)
+ >>> childann[0]
+ [('old', 1), ('c', 2), ('p2', 2), ('c', 4), ('p2', 3)]
+
+ >>> childann = decorate(childdata, childfctx)
+ >>> childann = _annotatepair([p2ann, p1ann], childfctx, childann, False,
+ ... diffopts)
+ >>> childann[0]
+ [('old', 1), ('c', 2), ('p1', 3), ('c', 4), ('p2', 3)]
+
+ Test with skipchild (note the difference caused by ordering):
+
+ >>> childann = decorate(childdata, childfctx)
+ >>> childann = _annotatepair([p1ann, p2ann], childfctx, childann, True,
+ ... diffopts)
+ >>> childann[0]
+ [('old', 1), ('old', 2), ('p2', 2), ('p2', 2), ('p2', 3)]
+
+ >>> childann = decorate(childdata, childfctx)
+ >>> childann = _annotatepair([p2ann, p1ann], childfctx, childann, True,
+ ... diffopts)
+ >>> childann[0]
+ [('old', 1), ('old', 2), ('p1', 3), ('p1', 3), ('p2', 3)]
+ '''
+ pblocks = [(parent, mdiff.allblocks(parent[1], child[1], opts=diffopts))
+ for parent in parents]
+
+ if skipchild:
+ # Need to iterate over the blocks twice -- make it a list
+ pblocks = [(p, list(blocks)) for (p, blocks) in pblocks]
+ # Mercurial currently prefers p2 over p1 for annotate.
+ # TODO: change this?
+ for parent, blocks in pblocks:
+ for (a1, a2, b1, b2), t in blocks:
+ # Changed blocks ('!') or blocks made only of blank lines ('~')
+ # belong to the child.
+ if t == '=':
+ child[0][b1:b2] = parent[0][a1:a2]
+
+ if skipchild:
+ # Now try and match up anything that couldn't be matched,
+ # Reversing pblocks maintains bias towards p2, matching above
+ # behavior.
+ pblocks.reverse()
+
+ # The heuristics are:
+ # * Work on blocks of changed lines (effectively diff hunks with -U0).
+ # This could potentially be smarter but works well enough.
+ # * For a non-matching section, do a best-effort fit. Match lines in
+ # diff hunks 1:1, dropping lines as necessary.
+ # * Repeat the last line as a last resort.
+
+ # First, replace as much as possible without repeating the last line.
+ remaining = [(parent, []) for parent, _blocks in pblocks]
+ for idx, (parent, blocks) in enumerate(pblocks):
+ for (a1, a2, b1, b2), _t in blocks:
+ if a2 - a1 >= b2 - b1:
+ for bk in xrange(b1, b2):
+ if child[0][bk][0] == childfctx:
+ ak = min(a1 + (bk - b1), a2 - 1)
+ child[0][bk] = parent[0][ak]
+ else:
+ remaining[idx][1].append((a1, a2, b1, b2))
+
+ # Then, look at anything left, which might involve repeating the last
+ # line.
+ for parent, blocks in remaining:
+ for a1, a2, b1, b2 in blocks:
+ for bk in xrange(b1, b2):
+ if child[0][bk][0] == childfctx:
+ ak = min(a1 + (bk - b1), a2 - 1)
+ child[0][bk] = parent[0][ak]
+ return child
+
class filectx(basefilectx):
"""A filecontext object makes access to data related to a particular
filerevision convenient."""
@@ -1134,6 +1224,10 @@
def rawdata(self):
return self._filelog.revision(self._filenode, raw=True)
+ def rawflags(self):
+ """low-level revlog flags"""
+ return self._filelog.flags(self._filerev)
+
def data(self):
try:
return self._filelog.read(self._filenode)
@@ -1146,7 +1240,8 @@
def size(self):
return self._filelog.size(self._filerev)
- def renamed(self):
+ @propertycache
+ def _copied(self):
"""check if file was actually renamed in this changeset revision
If rename logged in file revision, we report copy for changeset only
@@ -1177,89 +1272,6 @@
return [filectx(self._repo, self._path, fileid=x,
filelog=self._filelog) for x in c]
-def _changesrange(fctx1, fctx2, linerange2, diffopts):
- """Return `(diffinrange, linerange1)` where `diffinrange` is True
- if diff from fctx2 to fctx1 has changes in linerange2 and
- `linerange1` is the new line range for fctx1.
- """
- blocks = mdiff.allblocks(fctx1.data(), fctx2.data(), diffopts)
- filteredblocks, linerange1 = mdiff.blocksinrange(blocks, linerange2)
- diffinrange = any(stype == '!' for _, stype in filteredblocks)
- return diffinrange, linerange1
-
-def blockancestors(fctx, fromline, toline, followfirst=False):
- """Yield ancestors of `fctx` with respect to the block of lines within
- `fromline`-`toline` range.
- """
- diffopts = patch.diffopts(fctx._repo.ui)
- introrev = fctx.introrev()
- if fctx.rev() != introrev:
- fctx = fctx.filectx(fctx.filenode(), changeid=introrev)
- visit = {(fctx.linkrev(), fctx.filenode()): (fctx, (fromline, toline))}
- while visit:
- c, linerange2 = visit.pop(max(visit))
- pl = c.parents()
- if followfirst:
- pl = pl[:1]
- if not pl:
- # The block originates from the initial revision.
- yield c, linerange2
- continue
- inrange = False
- for p in pl:
- inrangep, linerange1 = _changesrange(p, c, linerange2, diffopts)
- inrange = inrange or inrangep
- if linerange1[0] == linerange1[1]:
- # Parent's linerange is empty, meaning that the block got
- # introduced in this revision; no need to go futher in this
- # branch.
- continue
- # Set _descendantrev with 'c' (a known descendant) so that, when
- # _adjustlinkrev is called for 'p', it receives this descendant
- # (as srcrev) instead possibly topmost introrev.
- p._descendantrev = c.rev()
- visit[p.linkrev(), p.filenode()] = p, linerange1
- if inrange:
- yield c, linerange2
-
-def blockdescendants(fctx, fromline, toline):
- """Yield descendants of `fctx` with respect to the block of lines within
- `fromline`-`toline` range.
- """
- # First possibly yield 'fctx' if it has changes in range with respect to
- # its parents.
- try:
- c, linerange1 = next(blockancestors(fctx, fromline, toline))
- except StopIteration:
- pass
- else:
- if c == fctx:
- yield c, linerange1
-
- diffopts = patch.diffopts(fctx._repo.ui)
- fl = fctx.filelog()
- seen = {fctx.filerev(): (fctx, (fromline, toline))}
- for i in fl.descendants([fctx.filerev()]):
- c = fctx.filectx(i)
- inrange = False
- for x in fl.parentrevs(i):
- try:
- p, linerange2 = seen[x]
- except KeyError:
- # nullrev or other branch
- continue
- inrangep, linerange1 = _changesrange(c, p, linerange2, diffopts)
- inrange = inrange or inrangep
- # If revision 'i' has been seen (it's a merge), we assume that its
- # line range is the same independently of which parents was used
- # to compute it.
- assert i not in seen or seen[i][1] == linerange1, (
- 'computed line range for %s is not consistent between '
- 'ancestor branches' % c)
- seen[i] = c, linerange1
- if inrange:
- yield c, linerange1
-
class committablectx(basectx):
"""A committablectx object provides common functionality for a context that
wants the ability to commit, e.g. workingctx or memctx."""
@@ -1289,7 +1301,10 @@
self._extra['branch'] = 'default'
def __str__(self):
- return str(self._parents[0]) + "+"
+ return str(self._parents[0]) + r"+"
+
+ def __bytes__(self):
+ return bytes(self._parents[0]) + "+"
def __nonzero__(self):
return True
@@ -1342,7 +1357,11 @@
@propertycache
def _date(self):
- return util.makedate()
+ ui = self._repo.ui
+ date = ui.configdate('devel', 'default-date')
+ if date is None:
+ date = util.makedate()
+ return date
def subrev(self, subpath):
return None
@@ -1396,7 +1415,7 @@
return []
def flags(self, path):
- if '_manifest' in self.__dict__:
+ if r'_manifest' in self.__dict__:
try:
return self._manifest.flags(path)
except KeyError:
@@ -1436,19 +1455,21 @@
"""
- self._repo.dirstate.beginparentchange()
- for f in self.modified() + self.added():
- self._repo.dirstate.normal(f)
- for f in self.removed():
- self._repo.dirstate.drop(f)
- self._repo.dirstate.setparents(node)
- self._repo.dirstate.endparentchange()
+ with self._repo.dirstate.parentchange():
+ for f in self.modified() + self.added():
+ self._repo.dirstate.normal(f)
+ for f in self.removed():
+ self._repo.dirstate.drop(f)
+ self._repo.dirstate.setparents(node)
# write changes out explicitly, because nesting wlock at
# runtime may prevent 'wlock.release()' in 'repo.commit()'
# from immediately doing so for subsequent changing files
self._repo.dirstate.write(self._repo.currenttransaction())
+ def dirty(self, missing=False, merge=True, branch=True):
+ return False
+
class workingctx(committablectx):
"""A workingctx object makes access to data related to
the current working directory convenient.
@@ -1577,19 +1598,15 @@
def match(self, pats=None, include=None, exclude=None, default='glob',
listsubrepos=False, badfn=None):
- if pats is None:
- pats = []
r = self._repo
# Only a case insensitive filesystem needs magic to translate user input
# to actual case in the filesystem.
- matcherfunc = matchmod.match
- if not util.fscasesensitive(r.root):
- matcherfunc = matchmod.icasefsmatcher
- return matcherfunc(r.root, r.getcwd(), pats,
- include, exclude, default,
- auditor=r.auditor, ctx=self,
- listsubrepos=listsubrepos, badfn=badfn)
+ icasefs = not util.fscasesensitive(r.root)
+ return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
+ default, auditor=r.auditor, ctx=self,
+ listsubrepos=listsubrepos, badfn=badfn,
+ icasefs=icasefs)
def _filtersuspectsymlink(self, files):
if not files or self._repo.dirstate._checklink:
@@ -1638,25 +1655,47 @@
# it's in the dirstate.
deleted.append(f)
- # update dirstate for files that are actually clean
- if fixup:
+ return modified, deleted, fixup
+
+ def _poststatusfixup(self, status, fixup):
+ """update dirstate for files that are actually clean"""
+ poststatus = self._repo.postdsstatus()
+ if fixup or poststatus:
try:
+ oldid = self._repo.dirstate.identity()
+
# updating the dirstate is optional
# so we don't wait on the lock
# wlock can invalidate the dirstate, so cache normal _after_
# taking the lock
with self._repo.wlock(False):
- normal = self._repo.dirstate.normal
- for f in fixup:
- normal(f)
- # write changes out explicitly, because nesting
- # wlock at runtime may prevent 'wlock.release()'
- # after this block from doing so for subsequent
- # changing files
- self._repo.dirstate.write(self._repo.currenttransaction())
+ if self._repo.dirstate.identity() == oldid:
+ if fixup:
+ normal = self._repo.dirstate.normal
+ for f in fixup:
+ normal(f)
+ # write changes out explicitly, because nesting
+ # wlock at runtime may prevent 'wlock.release()'
+ # after this block from doing so for subsequent
+ # changing files
+ tr = self._repo.currenttransaction()
+ self._repo.dirstate.write(tr)
+
+ if poststatus:
+ for ps in poststatus:
+ ps(self, status)
+ else:
+ # in this case, writing changes out breaks
+ # consistency, because .hg/dirstate was
+ # already changed simultaneously after last
+ # caching (see also issue5584 for detail)
+ self._repo.ui.debug('skip updating dirstate: '
+ 'identity mismatch\n')
except error.LockError:
pass
- return modified, deleted, fixup
+ finally:
+ # Even if the wlock couldn't be grabbed, clear out the list.
+ self._repo.clearpostdsstatus()
def _dirstatestatus(self, match=None, ignored=False, clean=False,
unknown=False):
@@ -1670,15 +1709,17 @@
listclean, listunknown)
# check for any possibly clean files
+ fixup = []
if cmp:
modified2, deleted2, fixup = self._checklookup(cmp)
s.modified.extend(modified2)
s.deleted.extend(deleted2)
- # update dirstate for files that are actually clean
if fixup and listclean:
s.clean.extend(fixup)
+ self._poststatusfixup(s, fixup)
+
if match.always():
# cache for performance
if s.unknown or s.ignored or s.clean:
@@ -1912,6 +1953,41 @@
return getfilectx
+def memfilefromctx(ctx):
+ """Given a context return a memfilectx for ctx[path]
+
+ This is a convenience method for building a memctx based on another
+ context.
+ """
+ def getfilectx(repo, memctx, path):
+ fctx = ctx[path]
+ # this is weird but apparently we only keep track of one parent
+ # (why not only store that instead of a tuple?)
+ copied = fctx.renamed()
+ if copied:
+ copied = copied[0]
+ return memfilectx(repo, path, fctx.data(),
+ islink=fctx.islink(), isexec=fctx.isexec(),
+ copied=copied, memctx=memctx)
+
+ return getfilectx
+
+def memfilefrompatch(patchstore):
+ """Given a patch (e.g. patchstore object) return a memfilectx
+
+ This is a convenience method for building a memctx based on a patchstore.
+ """
+ def getfilectx(repo, memctx, path):
+ data, mode, copied = patchstore.getfile(path)
+ if data is None:
+ return None
+ islink, isexec = mode
+ return memfilectx(repo, path, data, islink=islink,
+ isexec=isexec, copied=copied,
+ memctx=memctx)
+
+ return getfilectx
+
class memctx(committablectx):
"""Use memctx to perform in-memory commits via localrepo.commitctx().
@@ -1946,7 +2022,7 @@
_returnnoneformissingfiles = True
def __init__(self, repo, parents, text, files, filectxfn, user=None,
- date=None, extra=None, editor=False):
+ date=None, extra=None, branch=None, editor=False):
super(memctx, self).__init__(repo, text, user, date, extra)
self._rev = None
self._node = None
@@ -1955,32 +2031,18 @@
self._parents = [changectx(self._repo, p) for p in (p1, p2)]
files = sorted(set(files))
self._files = files
+ if branch is not None:
+ self._extra['branch'] = encoding.fromlocal(branch)
self.substate = {}
- # if store is not callable, wrap it in a function
- if not callable(filectxfn):
- def getfilectx(repo, memctx, path):
- fctx = filectxfn[path]
- # this is weird but apparently we only keep track of one parent
- # (why not only store that instead of a tuple?)
- copied = fctx.renamed()
- if copied:
- copied = copied[0]
- return memfilectx(repo, path, fctx.data(),
- islink=fctx.islink(), isexec=fctx.isexec(),
- copied=copied, memctx=memctx)
- self._filectxfn = getfilectx
- else:
- # memoizing increases performance for e.g. vcs convert scenarios.
- self._filectxfn = makecachingfilectxfn(filectxfn)
+ if isinstance(filectxfn, patch.filestore):
+ filectxfn = memfilefrompatch(filectxfn)
+ elif not callable(filectxfn):
+ # if store is not callable, wrap it in a function
+ filectxfn = memfilefromctx(filectxfn)
- if extra:
- self._extra = extra.copy()
- else:
- self._extra = {}
-
- if self._extra.get('branch', '') == '':
- self._extra['branch'] = 'default'
+ # memoizing increases performance for e.g. vcs convert scenarios.
+ self._filectxfn = makecachingfilectxfn(filectxfn)
if editor:
self._text = editor(self._repo, self, [])
@@ -2072,12 +2134,6 @@
def data(self):
return self._data
- def size(self):
- return len(self.data())
- def flags(self):
- return self._flags
- def renamed(self):
- return self._copied
def remove(self, ignoremissing=False):
"""wraps unlink for a repo's working directory"""
@@ -2088,6 +2144,77 @@
"""wraps repo.wwrite"""
self._data = data
+class overlayfilectx(committablefilectx):
+ """Like memfilectx but take an original filectx and optional parameters to
+ override parts of it. This is useful when fctx.data() is expensive (i.e.
+ flag processor is expensive) and raw data, flags, and filenode could be
+ reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
+ """
+
+ def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
+ copied=None, ctx=None):
+ """originalfctx: filecontext to duplicate
+
+ datafunc: None or a function to override data (file content). It is a
+ function to be lazy. path, flags, copied, ctx: None or overridden value
+
+ copied could be (path, rev), or False. copied could also be just path,
+ and will be converted to (path, nullid). This simplifies some callers.
+ """
+
+ if path is None:
+ path = originalfctx.path()
+ if ctx is None:
+ ctx = originalfctx.changectx()
+ ctxmatch = lambda: True
+ else:
+ ctxmatch = lambda: ctx == originalfctx.changectx()
+
+ repo = originalfctx.repo()
+ flog = originalfctx.filelog()
+ super(overlayfilectx, self).__init__(repo, path, flog, ctx)
+
+ if copied is None:
+ copied = originalfctx.renamed()
+ copiedmatch = lambda: True
+ else:
+ if copied and not isinstance(copied, tuple):
+ # repo._filecommit will recalculate copyrev so nullid is okay
+ copied = (copied, nullid)
+ copiedmatch = lambda: copied == originalfctx.renamed()
+
+ # When data, copied (could affect data), ctx (could affect filelog
+ # parents) are not overridden, rawdata, rawflags, and filenode may be
+ # reused (repo._filecommit should double check filelog parents).
+ #
+ # path, flags are not hashed in filelog (but in manifestlog) so they do
+ # not affect reusable here.
+ #
+ # If ctx or copied is overridden to a same value with originalfctx,
+ # still consider it's reusable. originalfctx.renamed() may be a bit
+ # expensive so it's not called unless necessary. Assuming datafunc is
+ # always expensive, do not call it for this "reusable" test.
+ reusable = datafunc is None and ctxmatch() and copiedmatch()
+
+ if datafunc is None:
+ datafunc = originalfctx.data
+ if flags is None:
+ flags = originalfctx.flags()
+
+ self._datafunc = datafunc
+ self._flags = flags
+ self._copied = copied
+
+ if reusable:
+ # copy extra fields from originalfctx
+ attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
+ for attr in attrs:
+ if util.safehasattr(originalfctx, attr):
+ setattr(self, attr, getattr(originalfctx, attr))
+
+ def data(self):
+ return self._datafunc()
+
class metadataonlyctx(committablectx):
"""Like memctx but it's reusing the manifest of different commit.
Intended to be used by lightweight operations that are creating
@@ -2129,14 +2256,6 @@
self._files = originalctx.files()
self.substate = {}
- if extra:
- self._extra = extra.copy()
- else:
- self._extra = {}
-
- if self._extra.get('branch', '') == '':
- self._extra['branch'] = 'default'
-
if editor:
self._text = editor(self._repo, self, [])
self._repo.savecommitmessage(self._text)
@@ -2144,7 +2263,7 @@
def manifestnode(self):
return self._manifestnode
- @propertycache
+ @property
def _manifestctx(self):
return self._repo.manifestlog[self._manifestnode]
--- a/mercurial/copies.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/copies.py Tue Jun 20 16:33:46 2017 -0400
@@ -414,13 +414,15 @@
baselabel='topological common ancestor')
for f in u1u:
- _checkcopies(c1, f, m1, m2, base, tca, dirtyc1, limit, data1)
+ _checkcopies(c1, c2, f, base, tca, dirtyc1, limit, data1)
for f in u2u:
- _checkcopies(c2, f, m2, m1, base, tca, dirtyc2, limit, data2)
+ _checkcopies(c2, c1, f, base, tca, dirtyc2, limit, data2)
- copy = dict(data1['copy'].items() + data2['copy'].items())
- fullcopy = dict(data1['fullcopy'].items() + data2['fullcopy'].items())
+ copy = dict(data1['copy'])
+ copy.update(data2['copy'])
+ fullcopy = dict(data1['fullcopy'])
+ fullcopy.update(data2['fullcopy'])
if dirtyc1:
_combinecopies(data2['incomplete'], data1['incomplete'], copy, diverge,
@@ -462,8 +464,8 @@
'incompletediverge': bothincompletediverge
}
for f in bothnew:
- _checkcopies(c1, f, m1, m2, base, tca, dirtyc1, limit, both1)
- _checkcopies(c2, f, m2, m1, base, tca, dirtyc2, limit, both2)
+ _checkcopies(c1, c2, f, base, tca, dirtyc1, limit, both1)
+ _checkcopies(c2, c1, f, base, tca, dirtyc2, limit, both2)
if dirtyc1:
# incomplete copies may only be found on the "dirty" side for bothnew
assert not both2['incomplete']
@@ -598,17 +600,16 @@
except StopIteration:
return False
-def _checkcopies(ctx, f, m1, m2, base, tca, remotebase, limit, data):
+def _checkcopies(srcctx, dstctx, f, base, tca, remotebase, limit, data):
"""
- check possible copies of f from m1 to m2
+ check possible copies of f from msrc to mdst
- ctx = starting context for f in m1
- f = the filename to check (as in m1)
- m1 = the source manifest
- m2 = the destination manifest
+ srcctx = starting context for f in msrc
+ dstctx = destination context for f in mdst
+ f = the filename to check (as in msrc)
base = the changectx used as a merge base
tca = topological common ancestor for graft-like scenarios
- remotebase = True if base is outside tca::ctx, False otherwise
+ remotebase = True if base is outside tca::srcctx, False otherwise
limit = the rev number to not search beyond
data = dictionary of dictionary to store copy data. (see mergecopies)
@@ -618,6 +619,8 @@
once it "goes behind a certain revision".
"""
+ msrc = srcctx.manifest()
+ mdst = dstctx.manifest()
mb = base.manifest()
mta = tca.manifest()
# Might be true if this call is about finding backward renames,
@@ -630,15 +633,16 @@
# the base) this is more complicated as we must detect a divergence.
# We use 'backwards = False' in that case.
backwards = not remotebase and base != tca and f in mb
- getfctx = _makegetfctx(ctx)
+ getsrcfctx = _makegetfctx(srcctx)
+ getdstfctx = _makegetfctx(dstctx)
- if m1[f] == mb.get(f) and not remotebase:
+ if msrc[f] == mb.get(f) and not remotebase:
# Nothing to merge
return
of = None
- seen = set([f])
- for oc in getfctx(f, m1[f]).ancestors():
+ seen = {f}
+ for oc in getsrcfctx(f, msrc[f]).ancestors():
ocr = oc.linkrev()
of = oc.path()
if of in seen:
@@ -653,11 +657,11 @@
data['fullcopy'][of] = f # grafting backwards through renames
else:
data['fullcopy'][f] = of
- if of not in m2:
+ if of not in mdst:
continue # no match, keep looking
- if m2[of] == mb.get(of):
+ if mdst[of] == mb.get(of):
return # no merge needed, quit early
- c2 = getfctx(of, m2[of])
+ c2 = getdstfctx(of, mdst[of])
# c2 might be a plain new file on added on destination side that is
# unrelated to the droids we are looking for.
cr = _related(oc, c2, tca.rev())
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/dagop.py Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,424 @@
+# dagop.py - graph ancestry and topology algorithm for revset
+#
+# Copyright 2010 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+import heapq
+
+from . import (
+ error,
+ mdiff,
+ node,
+ patch,
+ smartset,
+)
+
+baseset = smartset.baseset
+generatorset = smartset.generatorset
+
+def revancestors(repo, revs, followfirst):
+ """Like revlog.ancestors(), but supports followfirst."""
+ if followfirst:
+ cut = 1
+ else:
+ cut = None
+ cl = repo.changelog
+
+ def iterate():
+ revs.sort(reverse=True)
+ irevs = iter(revs)
+ h = []
+
+ inputrev = next(irevs, None)
+ if inputrev is not None:
+ heapq.heappush(h, -inputrev)
+
+ seen = set()
+ while h:
+ current = -heapq.heappop(h)
+ if current == inputrev:
+ inputrev = next(irevs, None)
+ if inputrev is not None:
+ heapq.heappush(h, -inputrev)
+ if current not in seen:
+ seen.add(current)
+ yield current
+ try:
+ for parent in cl.parentrevs(current)[:cut]:
+ if parent != node.nullrev:
+ heapq.heappush(h, -parent)
+ except error.WdirUnsupported:
+ for parent in repo[current].parents()[:cut]:
+ if parent.rev() != node.nullrev:
+ heapq.heappush(h, -parent.rev())
+
+ return generatorset(iterate(), iterasc=False)
+
+def revdescendants(repo, revs, followfirst):
+ """Like revlog.descendants() but supports followfirst."""
+ if followfirst:
+ cut = 1
+ else:
+ cut = None
+
+ def iterate():
+ cl = repo.changelog
+ # XXX this should be 'parentset.min()' assuming 'parentset' is a
+ # smartset (and if it is not, it should.)
+ first = min(revs)
+ nullrev = node.nullrev
+ if first == nullrev:
+ # Are there nodes with a null first parent and a non-null
+ # second one? Maybe. Do we care? Probably not.
+ for i in cl:
+ yield i
+ else:
+ seen = set(revs)
+ for i in cl.revs(first + 1):
+ for x in cl.parentrevs(i)[:cut]:
+ if x != nullrev and x in seen:
+ seen.add(i)
+ yield i
+ break
+
+ return generatorset(iterate(), iterasc=True)
+
+def _reachablerootspure(repo, minroot, roots, heads, includepath):
+ """return (heads(::<roots> and ::<heads>))
+
+ If includepath is True, return (<roots>::<heads>)."""
+ if not roots:
+ return []
+ parentrevs = repo.changelog.parentrevs
+ roots = set(roots)
+ visit = list(heads)
+ reachable = set()
+ seen = {}
+ # prefetch all the things! (because python is slow)
+ reached = reachable.add
+ dovisit = visit.append
+ nextvisit = visit.pop
+ # open-code the post-order traversal due to the tiny size of
+ # sys.getrecursionlimit()
+ while visit:
+ rev = nextvisit()
+ if rev in roots:
+ reached(rev)
+ if not includepath:
+ continue
+ parents = parentrevs(rev)
+ seen[rev] = parents
+ for parent in parents:
+ if parent >= minroot and parent not in seen:
+ dovisit(parent)
+ if not reachable:
+ return baseset()
+ if not includepath:
+ return reachable
+ for rev in sorted(seen):
+ for parent in seen[rev]:
+ if parent in reachable:
+ reached(rev)
+ return reachable
+
+def reachableroots(repo, roots, heads, includepath=False):
+ """return (heads(::<roots> and ::<heads>))
+
+ If includepath is True, return (<roots>::<heads>)."""
+ if not roots:
+ return baseset()
+ minroot = roots.min()
+ roots = list(roots)
+ heads = list(heads)
+ try:
+ revs = repo.changelog.reachableroots(minroot, heads, roots, includepath)
+ except AttributeError:
+ revs = _reachablerootspure(repo, minroot, roots, heads, includepath)
+ revs = baseset(revs)
+ revs.sort()
+ return revs
+
+def _changesrange(fctx1, fctx2, linerange2, diffopts):
+ """Return `(diffinrange, linerange1)` where `diffinrange` is True
+ if diff from fctx2 to fctx1 has changes in linerange2 and
+ `linerange1` is the new line range for fctx1.
+ """
+ blocks = mdiff.allblocks(fctx1.data(), fctx2.data(), diffopts)
+ filteredblocks, linerange1 = mdiff.blocksinrange(blocks, linerange2)
+ diffinrange = any(stype == '!' for _, stype in filteredblocks)
+ return diffinrange, linerange1
+
+def blockancestors(fctx, fromline, toline, followfirst=False):
+ """Yield ancestors of `fctx` with respect to the block of lines within
+ `fromline`-`toline` range.
+ """
+ diffopts = patch.diffopts(fctx._repo.ui)
+ introrev = fctx.introrev()
+ if fctx.rev() != introrev:
+ fctx = fctx.filectx(fctx.filenode(), changeid=introrev)
+ visit = {(fctx.linkrev(), fctx.filenode()): (fctx, (fromline, toline))}
+ while visit:
+ c, linerange2 = visit.pop(max(visit))
+ pl = c.parents()
+ if followfirst:
+ pl = pl[:1]
+ if not pl:
+ # The block originates from the initial revision.
+ yield c, linerange2
+ continue
+ inrange = False
+ for p in pl:
+ inrangep, linerange1 = _changesrange(p, c, linerange2, diffopts)
+ inrange = inrange or inrangep
+ if linerange1[0] == linerange1[1]:
+ # Parent's linerange is empty, meaning that the block got
+                # introduced in this revision; no need to go further in this
+ # branch.
+ continue
+ # Set _descendantrev with 'c' (a known descendant) so that, when
+ # _adjustlinkrev is called for 'p', it receives this descendant
+        # (as srcrev) instead of the possibly topmost introrev.
+ p._descendantrev = c.rev()
+ visit[p.linkrev(), p.filenode()] = p, linerange1
+ if inrange:
+ yield c, linerange2
+
+def blockdescendants(fctx, fromline, toline):
+ """Yield descendants of `fctx` with respect to the block of lines within
+ `fromline`-`toline` range.
+ """
+ # First possibly yield 'fctx' if it has changes in range with respect to
+ # its parents.
+ try:
+ c, linerange1 = next(blockancestors(fctx, fromline, toline))
+ except StopIteration:
+ pass
+ else:
+ if c == fctx:
+ yield c, linerange1
+
+ diffopts = patch.diffopts(fctx._repo.ui)
+ fl = fctx.filelog()
+ seen = {fctx.filerev(): (fctx, (fromline, toline))}
+ for i in fl.descendants([fctx.filerev()]):
+ c = fctx.filectx(i)
+ inrange = False
+ for x in fl.parentrevs(i):
+ try:
+ p, linerange2 = seen[x]
+ except KeyError:
+ # nullrev or other branch
+ continue
+ inrangep, linerange1 = _changesrange(c, p, linerange2, diffopts)
+ inrange = inrange or inrangep
+ # If revision 'i' has been seen (it's a merge), we assume that its
+        # line range is the same independently of which parent was used
+ # to compute it.
+ assert i not in seen or seen[i][1] == linerange1, (
+ 'computed line range for %s is not consistent between '
+ 'ancestor branches' % c)
+ seen[i] = c, linerange1
+ if inrange:
+ yield c, linerange1
+
+def toposort(revs, parentsfunc, firstbranch=()):
+ """Yield revisions from heads to roots one (topo) branch at a time.
+
+ This function aims to be used by a graph generator that wishes to minimize
+ the number of parallel branches and their interleaving.
+
+ Example iteration order (numbers show the "true" order in a changelog):
+
+ o 4
+ |
+ o 1
+ |
+ | o 3
+ | |
+ | o 2
+ |/
+ o 0
+
+ Note that the ancestors of merges are understood by the current
+ algorithm to be on the same branch. This means no reordering will
+ occur behind a merge.
+ """
+
+ ### Quick summary of the algorithm
+ #
+ # This function is based around a "retention" principle. We keep revisions
+ # in memory until we are ready to emit a whole branch that immediately
+ # "merges" into an existing one. This reduces the number of parallel
+ # branches with interleaved revisions.
+ #
+ # During iteration revs are split into two groups:
+ # A) revision already emitted
+ # B) revision in "retention". They are stored as different subgroups.
+ #
+ # for each REV, we do the following logic:
+ #
+ # 1) if REV is a parent of (A), we will emit it. If there is a
+ # retention group ((B) above) that is blocked on REV being
+ # available, we emit all the revisions out of that retention
+ # group first.
+ #
+ # 2) else, we'll search for a subgroup in (B) awaiting for REV to be
+ # available, if such subgroup exist, we add REV to it and the subgroup is
+ # now awaiting for REV.parents() to be available.
+ #
+ # 3) finally if no such group existed in (B), we create a new subgroup.
+ #
+ #
+ # To bootstrap the algorithm, we emit the tipmost revision (which
+ # puts it in group (A) from above).
+
+ revs.sort(reverse=True)
+
+ # Set of parents of revision that have been emitted. They can be considered
+ # unblocked as the graph generator is already aware of them so there is no
+ # need to delay the revisions that reference them.
+ #
+ # If someone wants to prioritize a branch over the others, pre-filling this
+ # set will force all other branches to wait until this branch is ready to be
+ # emitted.
+ unblocked = set(firstbranch)
+
+ # list of groups waiting to be displayed, each group is defined by:
+ #
+ # (revs: lists of revs waiting to be displayed,
+    #  blocked: set of revs that cannot be displayed before those in 'revs')
+ #
+    # The second value ('blocked') corresponds to parents of any revision in the
+ # group ('revs') that is not itself contained in the group. The main idea
+ # of this algorithm is to delay as much as possible the emission of any
+ # revision. This means waiting for the moment we are about to display
+ # these parents to display the revs in a group.
+ #
+ # This first implementation is smart until it encounters a merge: it will
+ # emit revs as soon as any parent is about to be emitted and can grow an
+    # arbitrary number of revs in 'blocked'. In practice this means we properly
+    # retain new branches but give up on any special ordering for ancestors
+ # of merges. The implementation can be improved to handle this better.
+ #
+ # The first subgroup is special. It corresponds to all the revision that
+    # were already emitted. The 'revs' list is expected to be empty and the
+ # 'blocked' set contains the parents revisions of already emitted revision.
+ #
+ # You could pre-seed the <parents> set of groups[0] to a specific
+    # changeset to select what the first emitted branch should be.
+ groups = [([], unblocked)]
+ pendingheap = []
+ pendingset = set()
+
+ heapq.heapify(pendingheap)
+ heappop = heapq.heappop
+ heappush = heapq.heappush
+ for currentrev in revs:
+ # Heap works with smallest element, we want highest so we invert
+ if currentrev not in pendingset:
+ heappush(pendingheap, -currentrev)
+ pendingset.add(currentrev)
+        # iterates on pending rev until after the current rev has been
+ # processed.
+ rev = None
+ while rev != currentrev:
+ rev = -heappop(pendingheap)
+ pendingset.remove(rev)
+
+ # Seek for a subgroup blocked, waiting for the current revision.
+ matching = [i for i, g in enumerate(groups) if rev in g[1]]
+
+ if matching:
+ # The main idea is to gather together all sets that are blocked
+ # on the same revision.
+ #
+ # Groups are merged when a common blocking ancestor is
+ # observed. For example, given two groups:
+ #
+ # revs [5, 4] waiting for 1
+ # revs [3, 2] waiting for 1
+ #
+ # These two groups will be merged when we process
+ # 1. In theory, we could have merged the groups when
+ # we added 2 to the group it is now in (we could have
+ # noticed the groups were both blocked on 1 then), but
+ # the way it works now makes the algorithm simpler.
+ #
+ # We also always keep the oldest subgroup first. We can
+ # probably improve the behavior by having the longest set
+ # first. That way, graph algorithms could minimise the length
+        # of parallel lines in their drawing. This is currently not done.
+ targetidx = matching.pop(0)
+ trevs, tparents = groups[targetidx]
+ for i in matching:
+ gr = groups[i]
+ trevs.extend(gr[0])
+ tparents |= gr[1]
+ # delete all merged subgroups (except the one we kept)
+ # (starting from the last subgroup for performance and
+ # sanity reasons)
+ for i in reversed(matching):
+ del groups[i]
+ else:
+ # This is a new head. We create a new subgroup for it.
+ targetidx = len(groups)
+ groups.append(([], {rev}))
+
+ gr = groups[targetidx]
+
+        # We now add the current nodes to this subgroup. This is done
+ # after the subgroup merging because all elements from a subgroup
+ # that relied on this rev must precede it.
+ #
+ # we also update the <parents> set to include the parents of the
+ # new nodes.
+ if rev == currentrev: # only display stuff in rev
+ gr[0].append(rev)
+ gr[1].remove(rev)
+ parents = [p for p in parentsfunc(rev) if p > node.nullrev]
+ gr[1].update(parents)
+ for p in parents:
+ if p not in pendingset:
+ pendingset.add(p)
+ heappush(pendingheap, -p)
+
+ # Look for a subgroup to display
+ #
+ # When unblocked is empty (if clause), we were not waiting for any
+ # revisions during the first iteration (if no priority was given) or
+ # if we emitted a whole disconnected set of the graph (reached a
+ # root). In that case we arbitrarily take the oldest known
+ # subgroup. The heuristic could probably be better.
+ #
+ # Otherwise (elif clause) if the subgroup is blocked on
+ # a revision we just emitted, we can safely emit it as
+ # well.
+ if not unblocked:
+ if len(groups) > 1: # display other subset
+ targetidx = 1
+ gr = groups[1]
+ elif not gr[1] & unblocked:
+ gr = None
+
+ if gr is not None:
+ # update the set of awaited revisions with the one from the
+ # subgroup
+ unblocked |= gr[1]
+ # output all revisions in the subgroup
+ for r in gr[0]:
+ yield r
+ # delete the subgroup that you just output
+ # unless it is groups[0] in which case you just empty it.
+ if targetidx:
+ del groups[targetidx]
+ else:
+ gr[0][:] = []
+ # Check if we have some subgroup waiting for revisions we are not going to
+ # iterate over
+ for g in groups:
+ for r in g[0]:
+ yield r
--- a/mercurial/debugcommands.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/debugcommands.py Tue Jun 20 16:33:46 2017 -0400
@@ -32,7 +32,6 @@
changegroup,
cmdutil,
color,
- commands,
context,
dagparser,
dagutil,
@@ -40,6 +39,7 @@
error,
exchange,
extensions,
+ filemerge,
fileset,
formatter,
hg,
@@ -47,9 +47,11 @@
lock as lockmod,
merge as mergemod,
obsolete,
+ phases,
policy,
pvec,
pycompat,
+ registrar,
repair,
revlog,
revset,
@@ -69,9 +71,7 @@
release = lockmod.release
-# We reuse the command table from commands because it is easier than
-# teaching dispatch about multiple tables.
-command = cmdutil.command(commands.table)
+command = registrar.command()
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
@@ -289,20 +289,47 @@
ui.write("%s%s\n" % (indent_string, hex(node)))
chain = node
+def _debugobsmarkers(ui, data, all=None, indent=0, **opts):
+ """display version and markers contained in 'data'"""
+ indent_string = ' ' * indent
+ try:
+ version, markers = obsolete._readmarkers(data)
+ except error.UnknownVersion as exc:
+ msg = "%sunsupported version: %s (%d bytes)\n"
+ msg %= indent_string, exc.version, len(data)
+ ui.write(msg)
+ else:
+ msg = "%sversion: %s (%d bytes)\n"
+ msg %= indent_string, version, len(data)
+ ui.write(msg)
+ fm = ui.formatter('debugobsolete', opts)
+ for rawmarker in sorted(markers):
+ m = obsolete.marker(None, rawmarker)
+ fm.startitem()
+ fm.plain(indent_string)
+ cmdutil.showmarker(fm, m)
+ fm.end()
+
def _debugbundle2(ui, gen, all=None, **opts):
"""lists the contents of a bundle2"""
if not isinstance(gen, bundle2.unbundle20):
raise error.Abort(_('not a bundle2 file'))
ui.write(('Stream params: %s\n' % repr(gen.params)))
+ parttypes = opts.get('part_type', [])
for part in gen.iterparts():
+ if parttypes and part.type not in parttypes:
+ continue
ui.write('%s -- %r\n' % (part.type, repr(part.params)))
if part.type == 'changegroup':
version = part.params.get('version', '01')
cg = changegroup.getunbundler(version, part, 'UN')
_debugchangegroup(ui, cg, all=all, indent=4, **opts)
+ if part.type == 'obsmarkers':
+ _debugobsmarkers(ui, part.read(), all=all, indent=4, **opts)
@command('debugbundle',
[('a', 'all', None, _('show all details')),
+ ('', 'part-type', [], _('show only the named part type')),
('', 'spec', None, _('print the bundlespec of the bundle'))],
_('FILE'),
norepo=True)
@@ -388,42 +415,6 @@
ui.write(', '.join(ui.label(e, e) for e in effects.split()))
ui.write('\n')
-@command('debugcommands', [], _('[COMMAND]'), norepo=True)
-def debugcommands(ui, cmd='', *args):
- """list all available commands and options"""
- for cmd, vals in sorted(commands.table.iteritems()):
- cmd = cmd.split('|')[0].strip('^')
- opts = ', '.join([i[1] for i in vals[1]])
- ui.write('%s: %s\n' % (cmd, opts))
-
-@command('debugcomplete',
- [('o', 'options', None, _('show the command options'))],
- _('[-o] CMD'),
- norepo=True)
-def debugcomplete(ui, cmd='', **opts):
- """returns the completion list associated with the given command"""
-
- if opts.get('options'):
- options = []
- otables = [commands.globalopts]
- if cmd:
- aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
- otables.append(entry[1])
- for t in otables:
- for o in t:
- if "(DEPRECATED)" in o[3]:
- continue
- if o[0]:
- options.append('-%s' % o[0])
- options.append('--%s' % o[1])
- ui.write("%s\n" % "\n".join(options))
- return
-
- cmdlist, unused_allcmds = cmdutil.findpossible(cmd, commands.table)
- if ui.verbose:
- cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
- ui.write("%s\n" % "\n".join(sorted(cmdlist)))
-
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
"""create a stream clone bundle file
@@ -431,6 +422,12 @@
Stream bundles are special bundles that are essentially archives of
revlog files. They are commonly used for cloning very quickly.
"""
+ # TODO we may want to turn this into an abort when this functionality
+ # is moved into `hg bundle`.
+ if phases.hassecret(repo):
+ ui.warn(_('(warning: stream clone bundle will contain secret '
+ 'revisions)\n'))
+
requirements, gen = streamclone.generatebundlev1(repo)
changegroup.writechunks(ui, gen, fname)
@@ -499,7 +496,7 @@
ui.write(line)
ui.write("\n")
-@command('debugdata', commands.debugrevlogopts, _('-c|-m|FILE REV'))
+@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
"""dump the contents of a data file revision"""
if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
@@ -531,7 +528,7 @@
ui.write(("match: %s\n") % m(d[0]))
@command('debugdeltachain',
- commands.debugrevlogopts + commands.formatteropts,
+ cmdutil.debugrevlogopts + cmdutil.formatteropts,
_('-c|-m|FILE'),
optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
@@ -560,7 +557,7 @@
"""
r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
index = r.index
- generaldelta = r.version & revlog.REVLOGGENERALDELTA
+ generaldelta = r.version & revlog.FLAG_GENERALDELTA
def revinfo(rev):
e = index[rev]
@@ -666,7 +663,7 @@
[('', 'old', None, _('use old-style discovery')),
('', 'nonheads', None,
_('use old-style discovery with non-heads included')),
- ] + commands.remoteopts,
+ ] + cmdutil.remoteopts,
_('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
"""runs the changeset discovery protocol in isolation"""
@@ -728,7 +725,7 @@
localrevs = opts.get('local_head')
doit(localrevs, remoterevs)
-@command('debugextensions', commands.formatteropts, [], norepo=True)
+@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
'''show information about active extensions'''
exts = extensions.extensions(ui)
@@ -847,11 +844,7 @@
ignore = repo.dirstate._ignore
if not files:
# Show all the patterns
- includepat = getattr(ignore, 'includepat', None)
- if includepat is not None:
- ui.write("%s\n" % includepat)
- else:
- raise error.Abort(_("no ignore patterns found"))
+ ui.write("%s\n" % repr(ignore))
else:
for f in files:
nf = util.normpath(f)
@@ -880,7 +873,7 @@
else:
ui.write(_("%s is not ignored\n") % f)
-@command('debugindex', commands.debugrevlogopts +
+@command('debugindex', cmdutil.debugrevlogopts +
[('f', 'format', 0, _('revlog format'), _('FORMAT'))],
_('[-f FORMAT] -c|-m|FILE'),
optionalrepo=True)
@@ -891,7 +884,7 @@
if format not in (0, 1):
raise error.Abort(_("unknown format %d") % format)
- generaldelta = r.version & revlog.REVLOGGENERALDELTA
+ generaldelta = r.version & revlog.FLAG_GENERALDELTA
if generaldelta:
basehdr = ' delta'
else:
@@ -936,7 +929,7 @@
i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
-@command('debugindexdot', commands.debugrevlogopts,
+@command('debugindexdot', cmdutil.debugrevlogopts,
_('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
"""dump an index DAG as a graphviz dot file"""
@@ -950,7 +943,7 @@
ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
ui.write("}\n")
-@command('debuginstall', [] + commands.formatteropts, '', norepo=True)
+@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
'''test Mercurial installation
@@ -1020,19 +1013,20 @@
fm.write('hgmodules', _("checking installed modules (%s)...\n"),
os.path.dirname(pycompat.fsencode(__file__)))
- err = None
- try:
- from . import (
- base85,
- bdiff,
- mpatch,
- osutil,
- )
- dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
- except Exception as inst:
- err = inst
- problems += 1
- fm.condwrite(err, 'extensionserror', " %s\n", err)
+ if policy.policy in ('c', 'allow'):
+ err = None
+ try:
+ from .cext import (
+ base85,
+ bdiff,
+ mpatch,
+ osutil,
+ )
+ dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
+ except Exception as inst:
+ err = inst
+ problems += 1
+ fm.condwrite(err, 'extensionserror', " %s\n", err)
compengines = util.compengines._engines.values()
fm.write('compengines', _('checking registered compression engines (%s)\n'),
@@ -1330,9 +1324,11 @@
('', 'record-parents', False,
_('record parent information for the precursor')),
('r', 'rev', [], _('display markers relevant to REV')),
+ ('', 'exclusive', False, _('restrict display to markers only '
+ 'relevant to REV')),
('', 'index', False, _('display index of the marker')),
('', 'delete', [], _('delete markers specified by indices')),
- ] + commands.commitopts2 + commands.formatteropts,
+ ] + cmdutil.commitopts2 + cmdutil.formatteropts,
_('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
"""create arbitrary obsolete marker
@@ -1396,7 +1392,7 @@
parents = tuple(p.node() for p in parents)
repo.obsstore.create(tr, prec, succs, opts['flags'],
parents=parents, date=date,
- metadata=metadata)
+ metadata=metadata, ui=ui)
tr.close()
except ValueError as exc:
raise error.Abort(_('bad obsmarker input: %s') % exc)
@@ -1408,7 +1404,8 @@
if opts['rev']:
revs = scmutil.revrange(repo, opts['rev'])
nodes = [repo[r].node() for r in revs]
- markers = list(obsolete.getmarkers(repo, nodes=nodes))
+ markers = list(obsolete.getmarkers(repo, nodes=nodes,
+ exclusive=opts['exclusive']))
markers.sort(key=lambda x: x._data)
else:
markers = obsolete.getmarkers(repo)
@@ -1501,6 +1498,84 @@
ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
ui.write('\n')
+@command('debugpickmergetool',
+ [('r', 'rev', '', _('check for files in this revision'), _('REV')),
+ ('', 'changedelete', None, _('emulate merging change and delete')),
+ ] + cmdutil.walkopts + cmdutil.mergetoolopts,
+ _('[PATTERN]...'),
+ inferrepo=True)
+def debugpickmergetool(ui, repo, *pats, **opts):
+ """examine which merge tool is chosen for specified file
+
+ As described in :hg:`help merge-tools`, Mercurial examines
+ configurations below in this order to decide which merge tool is
+ chosen for specified file.
+
+ 1. ``--tool`` option
+ 2. ``HGMERGE`` environment variable
+ 3. configurations in ``merge-patterns`` section
+ 4. configuration of ``ui.merge``
+ 5. configurations in ``merge-tools`` section
+ 6. ``hgmerge`` tool (for historical reason only)
+ 7. default tool for fallback (``:merge`` or ``:prompt``)
+
+ This command writes out examination result in the style below::
+
+ FILE = MERGETOOL
+
+ By default, all files known in the first parent context of the
+ working directory are examined. Use file patterns and/or -I/-X
+ options to limit target files. -r/--rev is also useful to examine
+ files in another context without actual updating to it.
+
+ With --debug, this command shows warning messages while matching
+ against ``merge-patterns`` and so on, too. It is recommended to
+ use this option with explicit file patterns and/or -I/-X options,
+ because this option increases amount of output per file according
+ to configurations in hgrc.
+
+ With -v/--verbose, this command shows configurations below at
+ first (only if specified).
+
+ - ``--tool`` option
+ - ``HGMERGE`` environment variable
+ - configuration of ``ui.merge``
+
+ If merge tool is chosen before matching against
+ ``merge-patterns``, this command can't show any helpful
+ information, even with --debug. In such case, information above is
+ useful to know why a merge tool is chosen.
+ """
+ overrides = {}
+ if opts['tool']:
+ overrides[('ui', 'forcemerge')] = opts['tool']
+ ui.note(('with --tool %r\n') % (opts['tool']))
+
+ with ui.configoverride(overrides, 'debugmergepatterns'):
+ hgmerge = encoding.environ.get("HGMERGE")
+ if hgmerge is not None:
+ ui.note(('with HGMERGE=%r\n') % (hgmerge))
+ uimerge = ui.config("ui", "merge")
+ if uimerge:
+ ui.note(('with ui.merge=%r\n') % (uimerge))
+
+ ctx = scmutil.revsingle(repo, opts.get('rev'))
+ m = scmutil.match(ctx, pats, opts)
+ changedelete = opts['changedelete']
+ for path in ctx.walk(m):
+ fctx = ctx[path]
+ try:
+ if not ui.debugflag:
+ ui.pushbuffer(error=True)
+ tool, toolpath = filemerge._picktool(repo, ui, path,
+ fctx.isbinary(),
+ 'l' in fctx.flags(),
+ changedelete)
+ finally:
+ if not ui.debugflag:
+ ui.popbuffer()
+ ui.write(('%s = %s\n') % (path, tool))
+
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
'''access the pushkey key/value protocol
@@ -1603,7 +1678,7 @@
else:
ui.write(_("%s not renamed\n") % rel)
-@command('debugrevlog', commands.debugrevlogopts +
+@command('debugrevlog', cmdutil.debugrevlogopts +
[('d', 'dump', False, _('dump index data'))],
_('-c|-m|FILE'),
optionalrepo=True)
@@ -1645,9 +1720,9 @@
format = v & 0xFFFF
flags = []
gdelta = False
- if v & revlog.REVLOGNGINLINEDATA:
+ if v & revlog.FLAG_INLINE_DATA:
flags.append('inline')
- if v & revlog.REVLOGGENERALDELTA:
+ if v & revlog.FLAG_GENERALDELTA:
gdelta = True
flags.append('generaldelta')
if not flags:
@@ -1706,9 +1781,9 @@
numother += 1
# Obtain data on the raw chunks in the revlog.
- chunk = r._chunkraw(rev, rev)[1]
- if chunk:
- chunktype = chunk[0]
+ segment = r._getsegmentforrevs(rev, rev)[1]
+ if segment:
+ chunktype = segment[0]
else:
chunktype = 'empty'
@@ -1826,6 +1901,8 @@
@command('debugrevspec',
[('', 'optimize', None,
_('print parsed tree after optimizing (DEPRECATED)')),
+ ('', 'show-revs', True, _('print list of result revisions (default)')),
+ ('s', 'show-set', None, _('print internal representation of result set')),
('p', 'show-stage', [],
_('print parsed tree at the given stage'), _('NAME')),
('', 'no-optimized', False, _('evaluate tree without optimization')),
@@ -1838,6 +1915,9 @@
Use -p/--show-stage option to print the parsed tree at the given stages.
Use -p all to print tree at every stage.
+ Use --no-show-revs option with -s or -p to print only the set
+ representation or the parsed tree respectively.
+
Use --verify-optimized to compare the optimized result with the unoptimized
one. Returns 1 if the optimized result differs.
"""
@@ -1887,9 +1967,9 @@
if opts['verify_optimized']:
arevs = revset.makematcher(treebystage['analyzed'])(repo)
brevs = revset.makematcher(treebystage['optimized'])(repo)
- if ui.verbose:
- ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
- ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
+ if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
+ ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
+ ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
arevs = list(arevs)
brevs = list(brevs)
if arevs == brevs:
@@ -1911,8 +1991,10 @@
func = revset.makematcher(tree)
revs = func(repo)
- if ui.verbose:
- ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
+ if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
+ ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
+ if not opts['show_revs']:
+ return
for c in revs:
ui.write("%s\n" % c)
@@ -2038,18 +2120,23 @@
if newtree != tree:
ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
- mapfile = None
if revs is None:
- k = 'debugtemplate'
- t = formatter.maketemplater(ui, k, tmpl)
- ui.write(templater.stringify(t(k, ui=ui, **props)))
+ t = formatter.maketemplater(ui, tmpl)
+ props['ui'] = ui
+ ui.write(t.render(props))
else:
- displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
- mapfile, buffered=False)
+ displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
for r in revs:
displayer.show(repo[r], **props)
displayer.close()
+@command('debugupdatecaches', [])
+def debugupdatecaches(ui, repo, *pats, **opts):
+ """warm all known caches in the repository"""
+ with repo.wlock():
+ with repo.lock():
+ repo.updatecaches()
+
@command('debugupgraderepo', [
('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
('', 'run', False, _('performs an upgrade')),
@@ -2075,12 +2162,13 @@
"""
return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
-@command('debugwalk', commands.walkopts, _('[OPTION]... [FILE]...'),
+@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
"""show how files match on given patterns"""
m = scmutil.match(repo[None], pats, opts)
- items = list(repo.walk(m))
+ ui.write(('matcher: %r\n' % m))
+ items = list(repo[None].walk(m))
if not items:
return
f = lambda fn: fn
@@ -2097,12 +2185,12 @@
[('', 'three', '', 'three'),
('', 'four', '', 'four'),
('', 'five', '', 'five'),
- ] + commands.remoteopts,
+ ] + cmdutil.remoteopts,
_('REPO [OPTIONS]... [ONE [TWO]]'),
norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
repo = hg.peer(ui, opts, repopath)
- for opt in commands.remoteopts:
+ for opt in cmdutil.remoteopts:
del opts[opt[1]]
args = {}
for k, v in opts.iteritems():
--- a/mercurial/demandimport.py Tue Jun 13 22:24:41 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,331 +0,0 @@
-# demandimport.py - global demand-loading of modules for Mercurial
-#
-# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-'''
-demandimport - automatic demandloading of modules
-
-To enable this module, do:
-
- import demandimport; demandimport.enable()
-
-Imports of the following forms will be demand-loaded:
-
- import a, b.c
- import a.b as c
- from a import b,c # a will be loaded immediately
-
-These imports will not be delayed:
-
- from a import *
- b = __import__(a)
-'''
-
-from __future__ import absolute_import
-
-import contextlib
-import os
-import sys
-
-# __builtin__ in Python 2, builtins in Python 3.
-try:
- import __builtin__ as builtins
-except ImportError:
- import builtins
-
-contextmanager = contextlib.contextmanager
-
-_origimport = __import__
-
-nothing = object()
-
-# Python 3 doesn't have relative imports nor level -1.
-level = -1
-if sys.version_info[0] >= 3:
- level = 0
-_import = _origimport
-
-def _hgextimport(importfunc, name, globals, *args, **kwargs):
- try:
- return importfunc(name, globals, *args, **kwargs)
- except ImportError:
- if not globals:
- raise
- # extensions are loaded with "hgext_" prefix
- hgextname = 'hgext_%s' % name
- nameroot = hgextname.split('.', 1)[0]
- contextroot = globals.get('__name__', '').split('.', 1)[0]
- if nameroot != contextroot:
- raise
- # retry to import with "hgext_" prefix
- return importfunc(hgextname, globals, *args, **kwargs)
-
-class _demandmod(object):
- """module demand-loader and proxy
-
- Specify 1 as 'level' argument at construction, to import module
- relatively.
- """
- def __init__(self, name, globals, locals, level):
- if '.' in name:
- head, rest = name.split('.', 1)
- after = [rest]
- else:
- head = name
- after = []
- object.__setattr__(self, r"_data",
- (head, globals, locals, after, level, set()))
- object.__setattr__(self, r"_module", None)
- def _extend(self, name):
- """add to the list of submodules to load"""
- self._data[3].append(name)
-
- def _addref(self, name):
- """Record that the named module ``name`` imports this module.
-
- References to this proxy class having the name of this module will be
- replaced at module load time. We assume the symbol inside the importing
- module is identical to the "head" name of this module. We don't
- actually know if "as X" syntax is being used to change the symbol name
- because this information isn't exposed to __import__.
- """
- self._data[5].add(name)
-
- def _load(self):
- if not self._module:
- head, globals, locals, after, level, modrefs = self._data
- mod = _hgextimport(_import, head, globals, locals, None, level)
- if mod is self:
- # In this case, _hgextimport() above should imply
- # _demandimport(). Otherwise, _hgextimport() never
- # returns _demandmod. This isn't intentional behavior,
- # in fact. (see also issue5304 for detail)
- #
- # If self._module is already bound at this point, self
- # should be already _load()-ed while _hgextimport().
- # Otherwise, there is no way to import actual module
- # as expected, because (re-)invoking _hgextimport()
- # should cause same result.
- # This is reason why _load() returns without any more
- # setup but assumes self to be already bound.
- mod = self._module
- assert mod and mod is not self, "%s, %s" % (self, mod)
- return
-
- # load submodules
- def subload(mod, p):
- h, t = p, None
- if '.' in p:
- h, t = p.split('.', 1)
- if getattr(mod, h, nothing) is nothing:
- setattr(mod, h, _demandmod(p, mod.__dict__, mod.__dict__,
- level=1))
- elif t:
- subload(getattr(mod, h), t)
-
- for x in after:
- subload(mod, x)
-
- # Replace references to this proxy instance with the actual module.
- if locals and locals.get(head) == self:
- locals[head] = mod
-
- for modname in modrefs:
- modref = sys.modules.get(modname, None)
- if modref and getattr(modref, head, None) == self:
- setattr(modref, head, mod)
-
- object.__setattr__(self, r"_module", mod)
-
- def __repr__(self):
- if self._module:
- return "<proxied module '%s'>" % self._data[0]
- return "<unloaded module '%s'>" % self._data[0]
- def __call__(self, *args, **kwargs):
- raise TypeError("%s object is not callable" % repr(self))
- def __getattribute__(self, attr):
- if attr in ('_data', '_extend', '_load', '_module', '_addref'):
- return object.__getattribute__(self, attr)
- self._load()
- return getattr(self._module, attr)
- def __setattr__(self, attr, val):
- self._load()
- setattr(self._module, attr, val)
-
-_pypy = '__pypy__' in sys.builtin_module_names
-
-def _demandimport(name, globals=None, locals=None, fromlist=None, level=level):
- if not locals or name in ignore or fromlist == ('*',):
- # these cases we can't really delay
- return _hgextimport(_import, name, globals, locals, fromlist, level)
- elif not fromlist:
- # import a [as b]
- if '.' in name: # a.b
- base, rest = name.split('.', 1)
- # email.__init__ loading email.mime
- if globals and globals.get('__name__', None) == base:
- return _import(name, globals, locals, fromlist, level)
- # if a is already demand-loaded, add b to its submodule list
- if base in locals:
- if isinstance(locals[base], _demandmod):
- locals[base]._extend(rest)
- return locals[base]
- return _demandmod(name, globals, locals, level)
- else:
- # There is a fromlist.
- # from a import b,c,d
- # from . import b,c,d
- # from .a import b,c,d
-
- # level == -1: relative and absolute attempted (Python 2 only).
- # level >= 0: absolute only (Python 2 w/ absolute_import and Python 3).
- # The modern Mercurial convention is to use absolute_import everywhere,
- # so modern Mercurial code will have level >= 0.
-
- # The name of the module the import statement is located in.
- globalname = globals.get('__name__')
-
- def processfromitem(mod, attr):
- """Process an imported symbol in the import statement.
-
- If the symbol doesn't exist in the parent module, and if the
- parent module is a package, it must be a module. We set missing
- modules up as _demandmod instances.
- """
- symbol = getattr(mod, attr, nothing)
- nonpkg = getattr(mod, '__path__', nothing) is nothing
- if symbol is nothing:
- if nonpkg:
- # do not try relative import, which would raise ValueError,
- # and leave unknown attribute as the default __import__()
- # would do. the missing attribute will be detected later
- # while processing the import statement.
- return
- mn = '%s.%s' % (mod.__name__, attr)
- if mn in ignore:
- importfunc = _origimport
- else:
- importfunc = _demandmod
- symbol = importfunc(attr, mod.__dict__, locals, level=1)
- setattr(mod, attr, symbol)
-
- # Record the importing module references this symbol so we can
- # replace the symbol with the actual module instance at load
- # time.
- if globalname and isinstance(symbol, _demandmod):
- symbol._addref(globalname)
-
- def chainmodules(rootmod, modname):
- # recurse down the module chain, and return the leaf module
- mod = rootmod
- for comp in modname.split('.')[1:]:
- if getattr(mod, comp, nothing) is nothing:
- setattr(mod, comp, _demandmod(comp, mod.__dict__,
- mod.__dict__, level=1))
- mod = getattr(mod, comp)
- return mod
-
- if level >= 0:
- if name:
- # "from a import b" or "from .a import b" style
- rootmod = _hgextimport(_origimport, name, globals, locals,
- level=level)
- mod = chainmodules(rootmod, name)
- elif _pypy:
- # PyPy's __import__ throws an exception if invoked
- # with an empty name and no fromlist. Recreate the
- # desired behaviour by hand.
- mn = globalname
- mod = sys.modules[mn]
- if getattr(mod, '__path__', nothing) is nothing:
- mn = mn.rsplit('.', 1)[0]
- mod = sys.modules[mn]
- if level > 1:
- mn = mn.rsplit('.', level - 1)[0]
- mod = sys.modules[mn]
- else:
- mod = _hgextimport(_origimport, name, globals, locals,
- level=level)
-
- for x in fromlist:
- processfromitem(mod, x)
-
- return mod
-
- # But, we still need to support lazy loading of standard library and 3rd
- # party modules. So handle level == -1.
- mod = _hgextimport(_origimport, name, globals, locals)
- mod = chainmodules(mod, name)
-
- for x in fromlist:
- processfromitem(mod, x)
-
- return mod
-
-ignore = [
- '__future__',
- '_hashlib',
- # ImportError during pkg_resources/__init__.py:fixup_namespace_package
- '_imp',
- '_xmlplus',
- 'fcntl',
- 'nt', # pathlib2 tests the existence of built-in 'nt' module
- 'win32com.gen_py',
- 'win32com.shell', # 'appdirs' tries to import win32com.shell
- '_winreg', # 2.7 mimetypes needs immediate ImportError
- 'pythoncom',
- # imported by tarfile, not available under Windows
- 'pwd',
- 'grp',
- # imported by profile, itself imported by hotshot.stats,
- # not available under Windows
- 'resource',
- # this trips up many extension authors
- 'gtk',
- # setuptools' pkg_resources.py expects "from __main__ import x" to
- # raise ImportError if x not defined
- '__main__',
- '_ssl', # conditional imports in the stdlib, issue1964
- '_sre', # issue4920
- 'rfc822',
- 'mimetools',
- 'sqlalchemy.events', # has import-time side effects (issue5085)
- # setuptools 8 expects this module to explode early when not on windows
- 'distutils.msvc9compiler',
- '__builtin__',
- 'builtins',
- ]
-
-if _pypy:
- ignore.extend([
- # _ctypes.pointer is shadowed by "from ... import pointer" (PyPy 5)
- '_ctypes.pointer',
- ])
-
-def isenabled():
- return builtins.__import__ == _demandimport
-
-def enable():
- "enable global demand-loading of modules"
- if os.environ.get('HGDEMANDIMPORT') != 'disable':
- builtins.__import__ = _demandimport
-
-def disable():
- "disable global demand-loading of modules"
- builtins.__import__ = _origimport
-
-@contextmanager
-def deactivated():
- "context manager for disabling demandimport in 'with' blocks"
- demandenabled = isenabled()
- if demandenabled:
- disable()
-
- try:
- yield
- finally:
- if demandenabled:
- enable()
--- a/mercurial/destutil.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/destutil.py Tue Jun 20 16:33:46 2017 -0400
@@ -234,7 +234,7 @@
def _destmergebook(repo, action='merge', sourceset=None, destspace=None):
"""find merge destination in the active bookmark case"""
node = None
- bmheads = repo.bookmarkheads(repo._activebookmark)
+ bmheads = bookmarks.headsforactive(repo)
curhead = repo[repo._activebookmark].node()
if len(bmheads) == 2:
if curhead == bmheads[0]:
@@ -355,7 +355,7 @@
return None
def _statusotherbook(ui, repo):
- bmheads = repo.bookmarkheads(repo._activebookmark)
+ bmheads = bookmarks.headsforactive(repo)
curhead = repo[repo._activebookmark].node()
if repo.revs('%n and parents()', curhead):
# we are on the active bookmark
@@ -391,6 +391,9 @@
ui.warn(_('(committing will reopen branch "%s")\n') %
(currentbranch))
elif otherheads:
+ curhead = repo['.']
+ ui.status(_('updated to "%s: %s"\n') % (curhead,
+ curhead.description().split('\n')[0]))
ui.status(_('%i other heads for branch "%s"\n') %
(len(otherheads), currentbranch))
--- a/mercurial/diffhelpers.c Tue Jun 13 22:24:41 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,199 +0,0 @@
-/*
- * diffhelpers.c - helper routines for mpatch
- *
- * Copyright 2007 Chris Mason <chris.mason@oracle.com>
- *
- * This software may be used and distributed according to the terms
- * of the GNU General Public License v2, incorporated herein by reference.
- */
-
-#include <Python.h>
-#include <stdlib.h>
-#include <string.h>
-
-#include "util.h"
-
-static char diffhelpers_doc[] = "Efficient diff parsing";
-static PyObject *diffhelpers_Error;
-
-
-/* fixup the last lines of a and b when the patch has no newline at eof */
-static void _fix_newline(PyObject *hunk, PyObject *a, PyObject *b)
-{
- Py_ssize_t hunksz = PyList_Size(hunk);
- PyObject *s = PyList_GET_ITEM(hunk, hunksz-1);
- char *l = PyBytes_AsString(s);
- Py_ssize_t alen = PyList_Size(a);
- Py_ssize_t blen = PyList_Size(b);
- char c = l[0];
- PyObject *hline;
- Py_ssize_t sz = PyBytes_GET_SIZE(s);
-
- if (sz > 1 && l[sz-2] == '\r')
- /* tolerate CRLF in last line */
- sz -= 1;
-
- hline = PyBytes_FromStringAndSize(l, sz-1);
- if (!hline) {
- return;
- }
-
- if (c == ' ' || c == '+') {
- PyObject *rline = PyBytes_FromStringAndSize(l + 1, sz - 2);
- PyList_SetItem(b, blen-1, rline);
- }
- if (c == ' ' || c == '-') {
- Py_INCREF(hline);
- PyList_SetItem(a, alen-1, hline);
- }
- PyList_SetItem(hunk, hunksz-1, hline);
-}
-
-/* python callable form of _fix_newline */
-static PyObject *
-fix_newline(PyObject *self, PyObject *args)
-{
- PyObject *hunk, *a, *b;
- if (!PyArg_ParseTuple(args, "OOO", &hunk, &a, &b))
- return NULL;
- _fix_newline(hunk, a, b);
- return Py_BuildValue("l", 0);
-}
-
-#if (PY_VERSION_HEX < 0x02050000)
-static const char *addlines_format = "OOiiOO";
-#else
-static const char *addlines_format = "OOnnOO";
-#endif
-
-/*
- * read lines from fp into the hunk. The hunk is parsed into two arrays
- * a and b. a gets the old state of the text, b gets the new state
- * The control char from the hunk is saved when inserting into a, but not b
- * (for performance while deleting files)
- */
-static PyObject *
-addlines(PyObject *self, PyObject *args)
-{
-
- PyObject *fp, *hunk, *a, *b, *x;
- Py_ssize_t i;
- Py_ssize_t lena, lenb;
- Py_ssize_t num;
- Py_ssize_t todoa, todob;
- char *s, c;
- PyObject *l;
- if (!PyArg_ParseTuple(args, addlines_format,
- &fp, &hunk, &lena, &lenb, &a, &b))
- return NULL;
-
- while (1) {
- todoa = lena - PyList_Size(a);
- todob = lenb - PyList_Size(b);
- num = todoa > todob ? todoa : todob;
- if (num == 0)
- break;
- for (i = 0; i < num; i++) {
- x = PyFile_GetLine(fp, 0);
- s = PyBytes_AsString(x);
- c = *s;
- if (strcmp(s, "\\ No newline at end of file\n") == 0) {
- _fix_newline(hunk, a, b);
- continue;
- }
- if (c == '\n') {
- /* Some patches may be missing the control char
- * on empty lines. Supply a leading space. */
- Py_DECREF(x);
- x = PyBytes_FromString(" \n");
- }
- PyList_Append(hunk, x);
- if (c == '+') {
- l = PyBytes_FromString(s + 1);
- PyList_Append(b, l);
- Py_DECREF(l);
- } else if (c == '-') {
- PyList_Append(a, x);
- } else {
- l = PyBytes_FromString(s + 1);
- PyList_Append(b, l);
- Py_DECREF(l);
- PyList_Append(a, x);
- }
- Py_DECREF(x);
- }
- }
- return Py_BuildValue("l", 0);
-}
-
-/*
- * compare the lines in a with the lines in b. a is assumed to have
- * a control char at the start of each line, this char is ignored in the
- * compare
- */
-static PyObject *
-testhunk(PyObject *self, PyObject *args)
-{
-
- PyObject *a, *b;
- long bstart;
- Py_ssize_t alen, blen;
- Py_ssize_t i;
- char *sa, *sb;
-
- if (!PyArg_ParseTuple(args, "OOl", &a, &b, &bstart))
- return NULL;
- alen = PyList_Size(a);
- blen = PyList_Size(b);
- if (alen > blen - bstart || bstart < 0) {
- return Py_BuildValue("l", -1);
- }
- for (i = 0; i < alen; i++) {
- sa = PyBytes_AsString(PyList_GET_ITEM(a, i));
- sb = PyBytes_AsString(PyList_GET_ITEM(b, i + bstart));
- if (strcmp(sa + 1, sb) != 0)
- return Py_BuildValue("l", -1);
- }
- return Py_BuildValue("l", 0);
-}
-
-static PyMethodDef methods[] = {
- {"addlines", addlines, METH_VARARGS, "add lines to a hunk\n"},
- {"fix_newline", fix_newline, METH_VARARGS, "fixup newline counters\n"},
- {"testhunk", testhunk, METH_VARARGS, "test lines in a hunk\n"},
- {NULL, NULL}
-};
-
-#ifdef IS_PY3K
-static struct PyModuleDef diffhelpers_module = {
- PyModuleDef_HEAD_INIT,
- "diffhelpers",
- diffhelpers_doc,
- -1,
- methods
-};
-
-PyMODINIT_FUNC PyInit_diffhelpers(void)
-{
- PyObject *m;
-
- m = PyModule_Create(&diffhelpers_module);
- if (m == NULL)
- return NULL;
-
- diffhelpers_Error = PyErr_NewException("diffhelpers.diffhelpersError",
- NULL, NULL);
- Py_INCREF(diffhelpers_Error);
- PyModule_AddObject(m, "diffhelpersError", diffhelpers_Error);
-
- return m;
-}
-#else
-PyMODINIT_FUNC
-initdiffhelpers(void)
-{
- Py_InitModule3("diffhelpers", methods, diffhelpers_doc);
- diffhelpers_Error = PyErr_NewException("diffhelpers.diffhelpersError",
- NULL, NULL);
-}
-#endif
--- a/mercurial/dirs.c Tue Jun 13 22:24:41 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,315 +0,0 @@
-/*
- dirs.c - dynamic directory diddling for dirstates
-
- Copyright 2013 Facebook
-
- This software may be used and distributed according to the terms of
- the GNU General Public License, incorporated herein by reference.
-*/
-
-#define PY_SSIZE_T_CLEAN
-#include <Python.h>
-#include "util.h"
-
-#ifdef IS_PY3K
-#define PYLONG_VALUE(o) ((PyLongObject *)o)->ob_digit[1]
-#else
-#define PYLONG_VALUE(o) PyInt_AS_LONG(o)
-#endif
-
-/*
- * This is a multiset of directory names, built from the files that
- * appear in a dirstate or manifest.
- *
- * A few implementation notes:
- *
- * We modify Python integers for refcounting, but those integers are
- * never visible to Python code.
- *
- * We mutate strings in-place, but leave them immutable once they can
- * be seen by Python code.
- */
-typedef struct {
- PyObject_HEAD
- PyObject *dict;
-} dirsObject;
-
-static inline Py_ssize_t _finddir(const char *path, Py_ssize_t pos)
-{
- while (pos != -1) {
- if (path[pos] == '/')
- break;
- pos -= 1;
- }
-
- return pos;
-}
-
-static int _addpath(PyObject *dirs, PyObject *path)
-{
- const char *cpath = PyBytes_AS_STRING(path);
- Py_ssize_t pos = PyBytes_GET_SIZE(path);
- PyObject *key = NULL;
- int ret = -1;
-
- /* This loop is super critical for performance. That's why we inline
- * access to Python structs instead of going through a supported API.
- * The implementation, therefore, is heavily dependent on CPython
- * implementation details. We also commit violations of the Python
- * "protocol" such as mutating immutable objects. But since we only
- * mutate objects created in this function or in other well-defined
- * locations, the references are known so these violations should go
- * unnoticed. The code for adjusting the length of a PyBytesObject is
- * essentially a minimal version of _PyBytes_Resize. */
- while ((pos = _finddir(cpath, pos - 1)) != -1) {
- PyObject *val;
-
- /* It's likely that every prefix already has an entry
- in our dict. Try to avoid allocating and
- deallocating a string for each prefix we check. */
- if (key != NULL)
- ((PyBytesObject *)key)->ob_shash = -1;
- else {
- /* Force Python to not reuse a small shared string. */
- key = PyBytes_FromStringAndSize(cpath,
- pos < 2 ? 2 : pos);
- if (key == NULL)
- goto bail;
- }
- /* Py_SIZE(o) refers to the ob_size member of the struct. Yes,
- * assigning to what looks like a function seems wrong. */
- Py_SIZE(key) = pos;
- ((PyBytesObject *)key)->ob_sval[pos] = '\0';
-
- val = PyDict_GetItem(dirs, key);
- if (val != NULL) {
- PYLONG_VALUE(val) += 1;
- break;
- }
-
- /* Force Python to not reuse a small shared int. */
-#ifdef IS_PY3K
- val = PyLong_FromLong(0x1eadbeef);
-#else
- val = PyInt_FromLong(0x1eadbeef);
-#endif
-
- if (val == NULL)
- goto bail;
-
- PYLONG_VALUE(val) = 1;
- ret = PyDict_SetItem(dirs, key, val);
- Py_DECREF(val);
- if (ret == -1)
- goto bail;
- Py_CLEAR(key);
- }
- ret = 0;
-
-bail:
- Py_XDECREF(key);
-
- return ret;
-}
-
-static int _delpath(PyObject *dirs, PyObject *path)
-{
- char *cpath = PyBytes_AS_STRING(path);
- Py_ssize_t pos = PyBytes_GET_SIZE(path);
- PyObject *key = NULL;
- int ret = -1;
-
- while ((pos = _finddir(cpath, pos - 1)) != -1) {
- PyObject *val;
-
- key = PyBytes_FromStringAndSize(cpath, pos);
-
- if (key == NULL)
- goto bail;
-
- val = PyDict_GetItem(dirs, key);
- if (val == NULL) {
- PyErr_SetString(PyExc_ValueError,
- "expected a value, found none");
- goto bail;
- }
-
- if (--PYLONG_VALUE(val) <= 0) {
- if (PyDict_DelItem(dirs, key) == -1)
- goto bail;
- } else
- break;
- Py_CLEAR(key);
- }
- ret = 0;
-
-bail:
- Py_XDECREF(key);
-
- return ret;
-}
-
-static int dirs_fromdict(PyObject *dirs, PyObject *source, char skipchar)
-{
- PyObject *key, *value;
- Py_ssize_t pos = 0;
-
- while (PyDict_Next(source, &pos, &key, &value)) {
- if (!PyBytes_Check(key)) {
- PyErr_SetString(PyExc_TypeError, "expected string key");
- return -1;
- }
- if (skipchar) {
- if (!dirstate_tuple_check(value)) {
- PyErr_SetString(PyExc_TypeError,
- "expected a dirstate tuple");
- return -1;
- }
- if (((dirstateTupleObject *)value)->state == skipchar)
- continue;
- }
-
- if (_addpath(dirs, key) == -1)
- return -1;
- }
-
- return 0;
-}
-
-static int dirs_fromiter(PyObject *dirs, PyObject *source)
-{
- PyObject *iter, *item = NULL;
- int ret;
-
- iter = PyObject_GetIter(source);
- if (iter == NULL)
- return -1;
-
- while ((item = PyIter_Next(iter)) != NULL) {
- if (!PyBytes_Check(item)) {
- PyErr_SetString(PyExc_TypeError, "expected string");
- break;
- }
-
- if (_addpath(dirs, item) == -1)
- break;
- Py_CLEAR(item);
- }
-
- ret = PyErr_Occurred() ? -1 : 0;
- Py_DECREF(iter);
- Py_XDECREF(item);
- return ret;
-}
-
-/*
- * Calculate a refcounted set of directory names for the files in a
- * dirstate.
- */
-static int dirs_init(dirsObject *self, PyObject *args)
-{
- PyObject *dirs = NULL, *source = NULL;
- char skipchar = 0;
- int ret = -1;
-
- self->dict = NULL;
-
- if (!PyArg_ParseTuple(args, "|Oc:__init__", &source, &skipchar))
- return -1;
-
- dirs = PyDict_New();
-
- if (dirs == NULL)
- return -1;
-
- if (source == NULL)
- ret = 0;
- else if (PyDict_Check(source))
- ret = dirs_fromdict(dirs, source, skipchar);
- else if (skipchar)
- PyErr_SetString(PyExc_ValueError,
- "skip character is only supported "
- "with a dict source");
- else
- ret = dirs_fromiter(dirs, source);
-
- if (ret == -1)
- Py_XDECREF(dirs);
- else
- self->dict = dirs;
-
- return ret;
-}
-
-PyObject *dirs_addpath(dirsObject *self, PyObject *args)
-{
- PyObject *path;
-
- if (!PyArg_ParseTuple(args, "O!:addpath", &PyBytes_Type, &path))
- return NULL;
-
- if (_addpath(self->dict, path) == -1)
- return NULL;
-
- Py_RETURN_NONE;
-}
-
-static PyObject *dirs_delpath(dirsObject *self, PyObject *args)
-{
- PyObject *path;
-
- if (!PyArg_ParseTuple(args, "O!:delpath", &PyBytes_Type, &path))
- return NULL;
-
- if (_delpath(self->dict, path) == -1)
- return NULL;
-
- Py_RETURN_NONE;
-}
-
-static int dirs_contains(dirsObject *self, PyObject *value)
-{
- return PyBytes_Check(value) ? PyDict_Contains(self->dict, value) : 0;
-}
-
-static void dirs_dealloc(dirsObject *self)
-{
- Py_XDECREF(self->dict);
- PyObject_Del(self);
-}
-
-static PyObject *dirs_iter(dirsObject *self)
-{
- return PyObject_GetIter(self->dict);
-}
-
-static PySequenceMethods dirs_sequence_methods;
-
-static PyMethodDef dirs_methods[] = {
- {"addpath", (PyCFunction)dirs_addpath, METH_VARARGS, "add a path"},
- {"delpath", (PyCFunction)dirs_delpath, METH_VARARGS, "remove a path"},
- {NULL} /* Sentinel */
-};
-
-static PyTypeObject dirsType = { PyVarObject_HEAD_INIT(NULL, 0) };
-
-void dirs_module_init(PyObject *mod)
-{
- dirs_sequence_methods.sq_contains = (objobjproc)dirs_contains;
- dirsType.tp_name = "parsers.dirs";
- dirsType.tp_new = PyType_GenericNew;
- dirsType.tp_basicsize = sizeof(dirsObject);
- dirsType.tp_dealloc = (destructor)dirs_dealloc;
- dirsType.tp_as_sequence = &dirs_sequence_methods;
- dirsType.tp_flags = Py_TPFLAGS_DEFAULT;
- dirsType.tp_doc = "dirs";
- dirsType.tp_iter = (getiterfunc)dirs_iter;
- dirsType.tp_methods = dirs_methods;
- dirsType.tp_init = (initproc)dirs_init;
-
- if (PyType_Ready(&dirsType) < 0)
- return;
- Py_INCREF(&dirsType);
-
- PyModule_AddObject(mod, "dirs", (PyObject *)&dirsType);
-}
--- a/mercurial/dirstate.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/dirstate.py Tue Jun 20 16:33:46 2017 -0400
@@ -8,6 +8,7 @@
from __future__ import absolute_import
import collections
+import contextlib
import errno
import os
import stat
@@ -18,15 +19,16 @@
encoding,
error,
match as matchmod,
- osutil,
- parsers,
pathutil,
+ policy,
pycompat,
scmutil,
txnutil,
util,
)
+parsers = policy.importmod(r'parsers')
+
propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = 0x7fffffff
@@ -100,6 +102,23 @@
# for consistent view between _pl() and _read() invocations
self._pendingmode = None
+ @contextlib.contextmanager
+ def parentchange(self):
+ '''Context manager for handling dirstate parents.
+
+ If an exception occurs in the scope of the context manager,
+ the incoherent dirstate won't be written when wlock is
+ released.
+ '''
+ self._parentwriters += 1
+ yield
+ # Typically we want the "undo" step of a context manager in a
+ # finally block so it happens even when an exception
+ # occurs. In this case, however, we only want to decrement
+ # parentwriters if the code in the with statement exits
+ # normally, so we don't have a try/finally here on purpose.
+ self._parentwriters -= 1
+
def beginparentchange(self):
'''Marks the beginning of a set of changes that involve changing
the dirstate parents. If there is an exception during this time,
@@ -107,6 +126,8 @@
prevents writing an incoherent dirstate where the parent doesn't
match the contents.
'''
+ self._ui.deprecwarn('beginparentchange is obsoleted by the '
+ 'parentchange context manager.', '4.3')
self._parentwriters += 1
def endparentchange(self):
@@ -114,6 +135,8 @@
dirstate parents. Once all parent changes have been marked done,
the wlock will be free to write the dirstate on release.
'''
+ self._ui.deprecwarn('endparentchange is obsoleted by the '
+ 'parentchange context manager.', '4.3')
if self._parentwriters > 0:
self._parentwriters -= 1
@@ -136,6 +159,11 @@
return self._copymap
@propertycache
+ def _identity(self):
+ self._read()
+ return self._identity
+
+ @propertycache
def _nonnormalset(self):
nonnorm, otherparents = nonnormalentries(self._map)
self._otherparentset = otherparents
@@ -209,7 +237,7 @@
def _ignore(self):
files = self._ignorefiles()
if not files:
- return util.never
+ return matchmod.never(self._root, '')
pats = ['include:%s' % f for f in files]
return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
@@ -320,9 +348,11 @@
for x in sorted(self._map):
yield x
- def iteritems(self):
+ def items(self):
return self._map.iteritems()
+ iteritems = items
+
def parents(self):
return [self._validate(p) for p in self._pl]
@@ -401,6 +431,9 @@
def _read(self):
self._map = {}
self._copymap = {}
+ # ignore HG_PENDING because identity is used only for writing
+ self._identity = util.filestat.frompath(
+ self._opener.join(self._filename))
try:
fp = self._opendirstatefile()
try:
@@ -445,7 +478,14 @@
self._pl = p
def invalidate(self):
- for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
+ '''Causes the next access to reread the dirstate.
+
+ This is different from localrepo.invalidatedirstate() because it always
+ rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
+ check whether the dirstate has changed before rereading it.'''
+
+ for a in ("_map", "_copymap", "_identity",
+ "_filefoldmap", "_dirfoldmap", "_branch",
"_pl", "_dirs", "_ignore", "_nonnormalset",
"_otherparentset"):
if a in self.__dict__:
@@ -710,6 +750,14 @@
self._dirty = True
+ def identity(self):
+ '''Return identity of dirstate itself to detect changing in storage
+
+ If identity of previous dirstate is equal to this, writing
+ changes based on the former dirstate out can keep consistency.
+ '''
+ return self._identity
+
def write(self, tr):
if not self._dirty:
return
@@ -988,7 +1036,7 @@
matchalways = match.always()
matchtdir = match.traversedir
dmap = self._map
- listdir = osutil.listdir
+ listdir = util.listdir
lstat = os.lstat
dirkind = stat.S_IFDIR
regkind = stat.S_IFREG
@@ -1021,6 +1069,8 @@
wadd = work.append
while work:
nd = work.pop()
+ if not match.visitdir(nd):
+ continue
skip = None
if nd == '.':
nd = ''
--- a/mercurial/discovery.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/discovery.py Tue Jun 20 16:33:46 2017 -0400
@@ -182,17 +182,22 @@
return og
-def _headssummary(repo, remote, outgoing):
+def _headssummary(pushop):
"""compute a summary of branch and heads status before and after push
- return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping
+ return {'branch': ([remoteheads], [newheads],
+ [unsyncedheads], [discardedheads])} mapping
- - branch: the branch name
+ - branch: the branch name,
- remoteheads: the list of remote heads known locally
- None if the branch is new
- - newheads: the new remote heads (known locally) with outgoing pushed
- - unsyncedheads: the list of remote heads unknown locally.
+ None if the branch is new,
+ - newheads: the new remote heads (known locally) with outgoing pushed,
+ - unsyncedheads: the list of remote heads unknown locally,
+ - discardedheads: the list of heads made obsolete by the push.
"""
+ repo = pushop.repo.unfiltered()
+ remote = pushop.remote
+ outgoing = pushop.outgoing
cl = repo.changelog
headssum = {}
# A. Create set of branches involved in the push.
@@ -235,6 +240,23 @@
newmap.update(repo, (ctx.rev() for ctx in missingctx))
for branch, newheads in newmap.iteritems():
headssum[branch][1][:] = newheads
+ for branch, items in headssum.iteritems():
+ for l in items:
+ if l is not None:
+ l.sort()
+ headssum[branch] = items + ([],)
+
+ # If there are no obsstore, no post processing are needed.
+ if repo.obsstore:
+ torev = repo.changelog.rev
+ futureheads = set(torev(h) for h in outgoing.missingheads)
+ futureheads |= set(torev(h) for h in outgoing.commonheads)
+ allfuturecommon = repo.changelog.ancestors(futureheads, inclusive=True)
+ for branch, heads in sorted(headssum.iteritems()):
+ remoteheads, newheads, unsyncedheads, placeholder = heads
+ result = _postprocessobsolete(pushop, allfuturecommon, newheads)
+ headssum[branch] = (remoteheads, sorted(result[0]), unsyncedheads,
+ sorted(result[1]))
return headssum
def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
@@ -244,20 +266,20 @@
# Construct {old,new}map with branch = None (topological branch).
# (code based on update)
knownnode = repo.changelog.hasnode # no nodemap until it is filtered
- oldheads = set(h for h in remoteheads if knownnode(h))
+ oldheads = sorted(h for h in remoteheads if knownnode(h))
# all nodes in outgoing.missing are children of either:
# - an element of oldheads
# - another element of outgoing.missing
# - nullrev
# This explains why the new head are very simple to compute.
r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
- newheads = list(c.node() for c in r)
+ newheads = sorted(c.node() for c in r)
# set some unsynced head to issue the "unsynced changes" warning
if inc:
- unsynced = set([None])
+ unsynced = [None]
else:
- unsynced = set()
- return {None: (oldheads, newheads, unsynced)}
+ unsynced = []
+ return {None: (oldheads, newheads, unsynced, [])}
def _nowarnheads(pushop):
# Compute newly pushed bookmarks. We don't warn about bookmarked heads.
@@ -307,9 +329,10 @@
return
if remote.capable('branchmap'):
- headssum = _headssummary(repo, remote, outgoing)
+ headssum = _headssummary(pushop)
else:
headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
+ pushop.pushbranchmap = headssum
newbranches = [branch for branch, heads in headssum.iteritems()
if heads[0] is None]
# 1. Check for new branches on the remote.
@@ -327,41 +350,26 @@
# If there are more heads after the push than before, a suitable
# error message, depending on unsynced status, is displayed.
errormsg = None
- # If there is no obsstore, allfuturecommon won't be used, so no
- # need to compute it.
- if repo.obsstore:
- allmissing = set(outgoing.missing)
- cctx = repo.set('%ld', outgoing.common)
- allfuturecommon = set(c.node() for c in cctx)
- allfuturecommon.update(allmissing)
for branch, heads in sorted(headssum.iteritems()):
- remoteheads, newheads, unsyncedheads = heads
- candidate_newhs = set(newheads)
+ remoteheads, newheads, unsyncedheads, discardedheads = heads
# add unsynced data
if remoteheads is None:
oldhs = set()
else:
oldhs = set(remoteheads)
oldhs.update(unsyncedheads)
- candidate_newhs.update(unsyncedheads)
dhs = None # delta heads, the new heads on branch
- if not repo.obsstore:
- discardedheads = set()
- newhs = candidate_newhs
- else:
- newhs, discardedheads = _postprocessobsolete(pushop,
- allfuturecommon,
- candidate_newhs)
- unsynced = sorted(h for h in unsyncedheads if h not in discardedheads)
- if unsynced:
- if None in unsynced:
+ newhs = set(newheads)
+ newhs.update(unsyncedheads)
+ if unsyncedheads:
+ if None in unsyncedheads:
# old remote, no heads data
heads = None
- elif len(unsynced) <= 4 or repo.ui.verbose:
- heads = ' '.join(short(h) for h in unsynced)
+ elif len(unsyncedheads) <= 4 or repo.ui.verbose:
+ heads = ' '.join(short(h) for h in unsyncedheads)
else:
- heads = (' '.join(short(h) for h in unsynced[:4]) +
- ' ' + _("and %s others") % (len(unsynced) - 4))
+ heads = (' '.join(short(h) for h in unsyncedheads[:4]) +
+ ' ' + _("and %s others") % (len(unsyncedheads) - 4))
if heads is None:
repo.ui.status(_("remote has heads that are "
"not known locally\n"))
@@ -431,11 +439,12 @@
repo = pushop.repo
unfi = repo.unfiltered()
tonode = unfi.changelog.node
+ torev = unfi.changelog.nodemap.get
public = phases.public
getphase = unfi._phasecache.phase
ispublic = (lambda r: getphase(unfi, r) == public)
- hasoutmarker = functools.partial(pushingmarkerfor, unfi.obsstore,
- futurecommon)
+ ispushed = (lambda n: torev(n) in futurecommon)
+ hasoutmarker = functools.partial(pushingmarkerfor, unfi.obsstore, ispushed)
successorsmarkers = unfi.obsstore.successors
newhs = set() # final set of new heads
discarded = set() # new head of fully replaced branch
@@ -460,8 +469,7 @@
while localcandidate:
nh = localcandidate.pop()
# run this check early to skip the evaluation of the whole branch
- if (nh in futurecommon
- or unfi[nh].phase() <= public):
+ if (torev(nh) in futurecommon or ispublic(torev(nh))):
newhs.add(nh)
continue
@@ -476,7 +484,7 @@
# * any part of it is considered part of the result by previous logic,
# * if we have no markers to push to obsolete it.
if (any(ispublic(r) for r in branchrevs)
- or any(n in futurecommon for n in branchnodes)
+ or any(torev(n) in futurecommon for n in branchnodes)
or any(not hasoutmarker(n) for n in branchnodes)):
newhs.add(nh)
else:
@@ -488,7 +496,7 @@
newhs |= unknownheads
return newhs, discarded
-def pushingmarkerfor(obsstore, pushset, node):
+def pushingmarkerfor(obsstore, ispushed, node):
"""true if some markers are to be pushed for node
We cannot just look in to the pushed obsmarkers from the pushop because
@@ -504,7 +512,7 @@
seen = set(stack)
while stack:
current = stack.pop()
- if current in pushset:
+ if ispushed(current):
return True
markers = successorsmarkers.get(current, ())
# markers fields = ('prec', 'succs', 'flag', 'meta', 'date', 'parents')
--- a/mercurial/dispatch.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/dispatch.py Tue Jun 20 16:33:46 2017 -0400
@@ -25,7 +25,6 @@
cmdutil,
color,
commands,
- debugcommands,
demandimport,
encoding,
error,
@@ -48,7 +47,7 @@
class request(object):
def __init__(self, args, ui=None, repo=None, fin=None, fout=None,
- ferr=None):
+ ferr=None, prereposetups=None):
self.args = args
self.ui = ui
self.repo = repo
@@ -58,6 +57,10 @@
self.fout = fout
self.ferr = ferr
+ # reposetups which run before extensions, useful for chg to pre-fill
+ # low-level repo state (for example, changelog) before extensions.
+ self.prereposetups = prereposetups or []
+
def _runexithandlers(self):
exc = None
handlers = self.ui._exithandlers
@@ -85,13 +88,13 @@
status = -1
if util.safehasattr(req.ui, 'fout'):
try:
- req.ui.fout.close()
+ req.ui.fout.flush()
except IOError as err:
status = -1
if util.safehasattr(req.ui, 'ferr'):
if err is not None and err.errno != errno.EPIPE:
req.ui.ferr.write('abort: %s\n' % err.strerror)
- req.ui.ferr.close()
+ req.ui.ferr.flush()
sys.exit(status & 255)
def _getsimilar(symbols, value):
@@ -162,9 +165,18 @@
ret = None
try:
ret = _runcatch(req)
- except KeyboardInterrupt:
+ except error.ProgrammingError as inst:
+ req.ui.warn(_('** ProgrammingError: %s\n') % inst)
+ if inst.hint:
+ req.ui.warn(_('** (%s)\n') % inst.hint)
+ raise
+ except KeyboardInterrupt as inst:
try:
- req.ui.warn(_("interrupted!\n"))
+ if isinstance(inst, error.SignalInterrupt):
+ msg = _("killed!\n")
+ else:
+ msg = _("interrupted!\n")
+ req.ui.warn(msg)
except error.SignalInterrupt:
# maybe pager would quit without consuming all the output, and
# SIGPIPE was raised. we cannot print anything in this case.
@@ -179,7 +191,7 @@
if req.ui.logblockedtimes:
req.ui._blockedtimes['command_duration'] = duration * 1000
req.ui.log('uiblocked', 'ui blocked ms', **req.ui._blockedtimes)
- req.ui.log("commandfinish", "%s exited %s after %0.2f seconds\n",
+ req.ui.log("commandfinish", "%s exited %d after %0.2f seconds\n",
msg, ret or 0, duration)
try:
req._runexithandlers()
@@ -307,7 +319,8 @@
except error.CommandError as inst:
if inst.args[0]:
ui.pager('help')
- ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
+ msgbytes = pycompat.bytestr(inst.args[1])
+ ui.warn(_("hg %s: %s\n") % (inst.args[0], msgbytes))
commands.help_(ui, inst.args[0], full=False, command=True)
else:
ui.pager('help')
@@ -321,7 +334,8 @@
try:
# check if the command is in a disabled extension
# (but don't check for extensions themselves)
- formatted = help.formattedhelp(ui, inst.args[0], unknowncmd=True)
+ formatted = help.formattedhelp(ui, commands, inst.args[0],
+ unknowncmd=True)
ui.warn(nocmdmsg)
ui.write(formatted)
except (error.UnknownCommand, error.Abort):
@@ -475,7 +489,8 @@
return aliasargs(self.fn, args)
def __getattr__(self, name):
- adefaults = {'norepo': True, 'optionalrepo': False, 'inferrepo': False}
+ adefaults = {r'norepo': True,
+ r'optionalrepo': False, r'inferrepo': False}
if name not in adefaults:
raise AttributeError(name)
if self.badalias or util.safehasattr(self, 'shell'):
@@ -740,11 +755,7 @@
rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
path, lui = _getlocal(ui, rpath)
- # Side-effect of accessing is debugcommands module is guaranteed to be
- # imported and commands.table is populated.
- debugcommands.command
-
- uis = set([ui, lui])
+ uis = {ui, lui}
if req.repo:
uis.add(req.repo.ui)
@@ -753,7 +764,8 @@
for ui_ in uis:
ui_.setconfig('profiling', 'enabled', 'true', '--profile')
- with profiling.maybeprofile(lui):
+ profile = lui.configbool('profiling', 'enabled')
+ with profiling.profile(lui, enabled=profile) as profiler:
# Configure extensions in phases: uisetup, extsetup, cmdtable, and
# reposetup. Programs like TortoiseHg will call _dispatch several
# times so we keep track of configured extensions in _loaded.
@@ -816,6 +828,8 @@
_("time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
(t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
ui.atexit(print_time)
+ if options["profile"]:
+ profiler.start()
if options['verbose'] or options['debug'] or options['quiet']:
for opt in ('verbose', 'debug', 'quiet'):
@@ -871,7 +885,8 @@
repo.ui.ferr = ui.ferr
else:
try:
- repo = hg.repository(ui, path=path)
+ repo = hg.repository(ui, path=path,
+ presetupfuncs=req.prereposetups)
if not repo.local():
raise error.Abort(_("repository '%s' is not local")
% path)
--- a/mercurial/encoding.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/encoding.py Tue Jun 20 16:33:46 2017 -0400
@@ -14,6 +14,7 @@
from . import (
error,
+ policy,
pycompat,
)
@@ -29,10 +30,7 @@
"200c 200d 200e 200f 202a 202b 202c 202d 202e "
"206a 206b 206c 206d 206e 206f feff".split()]
# verify the next function will work
-if pycompat.ispy3:
- assert set(i[0] for i in _ignore) == set([ord(b'\xe2'), ord(b'\xef')])
-else:
- assert set(i[0] for i in _ignore) == set(["\xe2", "\xef"])
+assert all(i.startswith(("\xe2", "\xef")) for i in _ignore)
def hfsignoreclean(s):
"""Remove codepoints ignored by HFS+ from s.
@@ -51,43 +49,18 @@
# the process environment
_nativeenviron = (not pycompat.ispy3 or os.supports_bytes_environ)
if not pycompat.ispy3:
- environ = os.environ
+ environ = os.environ # re-exports
elif _nativeenviron:
- environ = os.environb
+ environ = os.environb # re-exports
else:
# preferred encoding isn't known yet; use utf-8 to avoid unicode error
# and recreate it once encoding is settled
environ = dict((k.encode(u'utf-8'), v.encode(u'utf-8'))
- for k, v in os.environ.items())
-
-def _getpreferredencoding():
- '''
- On darwin, getpreferredencoding ignores the locale environment and
- always returns mac-roman. http://bugs.python.org/issue6202 fixes this
- for Python 2.7 and up. This is the same corrected code for earlier
- Python versions.
-
- However, we can't use a version check for this method, as some distributions
- patch Python to fix this. Instead, we use it as a 'fixer' for the mac-roman
- encoding, as it is unlikely that this encoding is the actually expected.
- '''
- try:
- locale.CODESET
- except AttributeError:
- # Fall back to parsing environment variables :-(
- return locale.getdefaultlocale()[1]
-
- oldloc = locale.setlocale(locale.LC_CTYPE)
- locale.setlocale(locale.LC_CTYPE, "")
- result = locale.nl_langinfo(locale.CODESET)
- locale.setlocale(locale.LC_CTYPE, oldloc)
-
- return result
+ for k, v in os.environ.items()) # re-exports
_encodingfixers = {
'646': lambda: 'ascii',
'ANSI_X3.4-1968': lambda: 'ascii',
- 'mac-roman': _getpreferredencoding
}
try:
@@ -218,11 +191,11 @@
# now encoding and helper functions are available, recreate the environ
# dict to be exported to other modules
environ = dict((tolocal(k.encode(u'utf-8')), tolocal(v.encode(u'utf-8')))
- for k, v in os.environ.items())
+ for k, v in os.environ.items()) # re-exports
# How to treat ambiguous-width characters. Set to 'wide' to treat as wide.
-wide = (environ.get("HGENCODINGAMBIGUOUS", "narrow") == "wide"
- and "WFA" or "WF")
+_wide = _sysstr(environ.get("HGENCODINGAMBIGUOUS", "narrow") == "wide"
+ and "WFA" or "WF")
def colwidth(s):
"Find the column width of a string for display in the local encoding"
@@ -232,7 +205,7 @@
"Find the column width of a Unicode string for display"
eaw = getattr(unicodedata, 'east_asian_width', None)
if eaw is not None:
- return sum([eaw(c) in wide and 2 or 1 for c in d])
+ return sum([eaw(c) in _wide and 2 or 1 for c in d])
return len(d)
def getcols(s, start, c):
@@ -346,7 +319,7 @@
def asciilower(s):
# delay importing avoids cyclic dependency around "parsers" in
# pure Python build (util => i18n => encoding => parsers => util)
- from . import parsers
+ parsers = policy.importmod(r'parsers')
impl = getattr(parsers, 'asciilower', _asciilower)
global asciilower
asciilower = impl
@@ -362,7 +335,7 @@
def asciiupper(s):
# delay importing avoids cyclic dependency around "parsers" in
# pure Python build (util => i18n => encoding => parsers => util)
- from . import parsers
+ parsers = policy.importmod(r'parsers')
impl = getattr(parsers, 'asciiupper', _asciiupper)
global asciiupper
asciiupper = impl
@@ -429,7 +402,7 @@
_jsonmap = []
_jsonmap.extend("\\u%04x" % x for x in range(32))
-_jsonmap.extend(chr(x) for x in range(32, 127))
+_jsonmap.extend(pycompat.bytechr(x) for x in range(32, 127))
_jsonmap.append('\\u007f')
_jsonmap[0x09] = '\\t'
_jsonmap[0x0a] = '\\n'
@@ -441,7 +414,7 @@
_paranoidjsonmap = _jsonmap[:]
_paranoidjsonmap[0x3c] = '\\u003c' # '<' (e.g. escape "</script>")
_paranoidjsonmap[0x3e] = '\\u003e' # '>'
-_jsonmap.extend(chr(x) for x in range(128, 256))
+_jsonmap.extend(pycompat.bytechr(x) for x in range(128, 256))
def jsonescape(s, paranoid=False):
'''returns a string suitable for JSON
--- a/mercurial/error.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/error.py Tue Jun 20 16:33:46 2017 -0400
@@ -13,7 +13,16 @@
from __future__ import absolute_import
-# Do not import anything here, please
+# Do not import anything but pycompat here, please
+from . import pycompat
+
+def _tobytes(exc):
+ """Byte-stringify exception in the same way as BaseException_str()"""
+ if not exc.args:
+ return b''
+ if len(exc.args) == 1:
+ return pycompat.bytestr(exc.args[0])
+ return b'(%s)' % b', '.join(b"'%s'" % pycompat.bytestr(a) for a in exc.args)
class Hint(object):
"""Mix-in to provide a hint of an error
@@ -26,10 +35,10 @@
super(Hint, self).__init__(*args, **kw)
class RevlogError(Hint, Exception):
- pass
+ __bytes__ = _tobytes
class FilteredIndexError(IndexError):
- pass
+ __bytes__ = _tobytes
class LookupError(RevlogError, KeyError):
def __init__(self, name, index, message):
@@ -43,6 +52,9 @@
name = short(name)
RevlogError.__init__(self, '%s@%s: %s' % (index, name, message))
+ def __bytes__(self):
+ return RevlogError.__bytes__(self)
+
def __str__(self):
return RevlogError.__str__(self)
@@ -54,12 +66,15 @@
class CommandError(Exception):
"""Exception raised on errors in parsing the command line."""
+ __bytes__ = _tobytes
class InterventionRequired(Hint, Exception):
"""Exception raised when a command requires human intervention."""
+ __bytes__ = _tobytes
class Abort(Hint, Exception):
"""Raised if a command needs to print an error and exit."""
+ __bytes__ = _tobytes
class HookLoadError(Abort):
"""raised when loading a hook fails, aborting an operation
@@ -94,9 +109,11 @@
class OutOfBandError(Hint, Exception):
"""Exception raised when a remote repo reports failure"""
+ __bytes__ = _tobytes
class ParseError(Hint, Exception):
"""Raised when parsing config files and {rev,file}sets (msg[, pos])"""
+ __bytes__ = _tobytes
class UnknownIdentifier(ParseError):
"""Exception raised when a {rev,file}set references an unknown identifier"""
@@ -108,7 +125,7 @@
self.symbols = symbols
class RepoError(Hint, Exception):
- pass
+ __bytes__ = _tobytes
class RepoLookupError(RepoError):
pass
@@ -128,6 +145,8 @@
def __init__(self, err):
IOError.__init__(self, err.errno, err.strerror)
+ # no __bytes__() because error message is derived from the standard IOError
+
class UnsupportedMergeRecords(Abort):
def __init__(self, recordtypes):
from .i18n import _
@@ -138,11 +157,21 @@
hint=_('see https://mercurial-scm.org/wiki/MergeStateRecords for '
'more information'))
+class UnknownVersion(Abort):
+ """generic exception for aborting from an encounter with an unknown version
+ """
+
+ def __init__(self, msg, hint=None, version=None):
+ self.version = version
+ super(UnknownVersion, self).__init__(msg, hint=hint)
+
class LockError(IOError):
def __init__(self, errno, strerror, filename, desc):
IOError.__init__(self, errno, strerror, filename)
self.desc = desc
+ # no __bytes__() because error message is derived from the standard IOError
+
class LockHeld(LockError):
def __init__(self, errno, filename, desc, locker):
LockError.__init__(self, errno, 'Lock held', filename, desc)
@@ -153,33 +182,43 @@
# LockError is for errors while acquiring the lock -- this is unrelated
class LockInheritanceContractViolation(RuntimeError):
- pass
+ __bytes__ = _tobytes
class ResponseError(Exception):
"""Raised to print an error with part of output and exit."""
+ __bytes__ = _tobytes
class UnknownCommand(Exception):
"""Exception raised if command is not in the command table."""
+ __bytes__ = _tobytes
class AmbiguousCommand(Exception):
"""Exception raised if command shortcut matches more than one command."""
+ __bytes__ = _tobytes
# derived from KeyboardInterrupt to simplify some breakout code
class SignalInterrupt(KeyboardInterrupt):
"""Exception raised on SIGTERM and SIGHUP."""
class SignatureError(Exception):
- pass
+ __bytes__ = _tobytes
class PushRaced(RuntimeError):
"""An exception raised during unbundling that indicate a push race"""
+ __bytes__ = _tobytes
-class ProgrammingError(RuntimeError):
+class ProgrammingError(Hint, RuntimeError):
"""Raised if a mercurial (core or extension) developer made a mistake"""
+ __bytes__ = _tobytes
+
+class WdirUnsupported(Exception):
+ """An exception which is raised when 'wdir()' is not supported"""
+ __bytes__ = _tobytes
# bundle2 related errors
class BundleValueError(ValueError):
"""error raised when bundle2 cannot be processed"""
+ __bytes__ = _tobytes
class BundleUnknownFeatureError(BundleValueError):
def __init__(self, parttype=None, params=(), values=()):
@@ -206,6 +245,7 @@
class ReadOnlyPartError(RuntimeError):
"""error raised when code tries to alter a part being generated"""
+ __bytes__ = _tobytes
class PushkeyFailed(Abort):
"""error raised when a pushkey part failed to update a value"""
@@ -246,12 +286,15 @@
This is used for syntax errors as opposed to support errors.
"""
+ __bytes__ = _tobytes
class UnsupportedBundleSpecification(Exception):
"""error raised when a bundle specification is not supported."""
+ __bytes__ = _tobytes
class CorruptedState(Exception):
"""error raised when a command is not able to read its state from file"""
+ __bytes__ = _tobytes
class PeerTransportError(Abort):
"""Transport-level I/O error when communicating with a peer repo."""
--- a/mercurial/exchange.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/exchange.py Tue Jun 20 16:33:46 2017 -0400
@@ -16,7 +16,6 @@
nullid,
)
from . import (
- base85,
bookmarks as bookmod,
bundle2,
changegroup,
@@ -26,10 +25,10 @@
obsolete,
phases,
pushkey,
+ pycompat,
scmutil,
sslutil,
streamclone,
- tags,
url as urlmod,
util,
)
@@ -45,7 +44,7 @@
}
# Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
-_bundlespecv1compengines = set(['gzip', 'bzip2', 'none'])
+_bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
def parsebundlespec(repo, spec, strict=True, externalnames=False):
"""Parse a bundle string specification into parts.
@@ -250,21 +249,6 @@
else:
raise error.Abort(_('unknown bundle type: %s') % b)
-def buildobsmarkerspart(bundler, markers):
- """add an obsmarker part to the bundler with <markers>
-
- No part is created if markers is empty.
- Raises ValueError if the bundler doesn't support any known obsmarker format.
- """
- if markers:
- remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
- version = obsolete.commonversion(remoteversions)
- if version is None:
- raise ValueError('bundler does not support common obsmarker format')
- stream = obsolete.encodemarkers(markers, True, version=version)
- return bundler.newpart('obsmarkers', data=stream)
- return None
-
def _computeoutgoing(repo, heads, common):
"""Computes which revs are outgoing given a set of common
and a set of heads.
@@ -340,8 +324,21 @@
self.bkresult = None
# discover.outgoing object (contains common and outgoing data)
self.outgoing = None
- # all remote heads before the push
+ # all remote topological heads before the push
self.remoteheads = None
+ # Details of the remote branch pre and post push
+ #
+ # mapping: {'branch': ([remoteheads],
+ # [newheads],
+ # [unsyncedheads],
+ # [discardedheads])}
+ # - branch: the branch name
+ # - remoteheads: the list of remote heads known locally
+ # None if the branch is new
+ # - newheads: the new remote heads (known locally) with outgoing pushed
+ # - unsyncedheads: the list of remote heads unknown locally.
+ # - discardedheads: the list of remote heads made obsolete by the push
+ self.pushbranchmap = None
# testable as a boolean indicating if any nodes are missing locally.
self.incoming = None
# phases changes that must be pushed along side the changesets
@@ -729,8 +726,23 @@
Exists as an independent function to aid extensions
"""
- if not pushop.force:
- bundler.newpart('check:heads', data=iter(pushop.remoteheads))
+ # * 'force' do not check for push race,
+ # * if we don't push anything, there are nothing to check.
+ if not pushop.force and pushop.outgoing.missingheads:
+ allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
+ if not allowunrelated:
+ bundler.newpart('check:heads', data=iter(pushop.remoteheads))
+ else:
+ affected = set()
+ for branch, heads in pushop.pushbranchmap.iteritems():
+ remoteheads, newheads, unsyncedheads, discardedheads = heads
+ if remoteheads is not None:
+ remote = set(remoteheads)
+ affected |= set(discardedheads) & remote
+ affected |= remote - set(newheads)
+ if affected:
+ data = iter(sorted(affected))
+ bundler.newpart('check:updated-heads', data=data)
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
@@ -824,7 +836,7 @@
pushop.stepsdone.add('obsmarkers')
if pushop.outobsmarkers:
markers = sorted(pushop.outobsmarkers)
- buildobsmarkerspart(bundler, markers)
+ bundle2.buildobsmarkerspart(bundler, markers)
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
@@ -952,8 +964,8 @@
'push',
fastpath=True)
else:
- cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing,
- bundlecaps)
+ cg = changegroup.getchangegroup(pushop.repo, 'push', outgoing,
+ bundlecaps=bundlecaps)
# apply changegroup to remote
if unbundle:
@@ -1335,7 +1347,9 @@
For now, the only supported data are changegroup."""
kwargs = {'bundlecaps': caps20to10(pullop.repo)}
- streaming, streamreqs = streamclone.canperformstreamclone(pullop)
+ # At the moment we don't do stream clones over bundle2. If that is
+ # implemented then here's where the check for that will go.
+ streaming = False
# pulling changegroup
pullop.stepsdone.add('changegroup')
@@ -1373,7 +1387,7 @@
kwargs['obsmarkers'] = True
pullop.stepsdone.add('obsmarkers')
_pullbundle2extraprepare(pullop, kwargs)
- bundle = pullop.remote.getbundle('pull', **kwargs)
+ bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
try:
op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
except bundle2.AbortFromPart as exc:
@@ -1416,7 +1430,7 @@
pullop.repo.ui.status(_("no changes found\n"))
pullop.cgresult = 0
return
- pullop.gettransaction()
+ tr = pullop.gettransaction()
if pullop.heads is None and list(pullop.common) == [nullid]:
pullop.repo.ui.status(_("requesting all changes\n"))
elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
@@ -1435,7 +1449,7 @@
"changegroupsubset."))
else:
cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
- pullop.cgresult = cg.apply(pullop.repo, 'pull', pullop.remote.url())
+ pullop.cgresult = cg.apply(pullop.repo, tr, 'pull', pullop.remote.url())
def _pullphase(pullop):
# Get remote phases data from remote
@@ -1512,7 +1526,7 @@
markers = []
for key in sorted(remoteobs, reverse=True):
if key.startswith('dump'):
- data = base85.b85decode(remoteobs[key])
+ data = util.b85decode(remoteobs[key])
version, newmarks = obsolete._readmarkers(data)
markers += newmarks
if markers:
@@ -1522,7 +1536,7 @@
def caps20to10(repo):
"""return a set with appropriate options to use bundle20 during getbundle"""
- caps = set(['HG20'])
+ caps = {'HG20'}
capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
caps.add('bundle2=' + urlreq.quote(capsblob))
return caps
@@ -1648,7 +1662,7 @@
subset = [c.node() for c in repo.set('::%ln', heads)]
markers = repo.obsstore.relevantmarkers(subset)
markers = sorted(markers)
- buildobsmarkerspart(bundler, markers)
+ bundle2.buildobsmarkerspart(bundler, markers)
@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
@@ -1668,30 +1682,7 @@
return
outgoing = _computeoutgoing(repo, heads, common)
-
- if not outgoing.missingheads:
- return
-
- cache = tags.hgtagsfnodescache(repo.unfiltered())
- chunks = []
-
- # .hgtags fnodes are only relevant for head changesets. While we could
- # transfer values for all known nodes, there will likely be little to
- # no benefit.
- #
- # We don't bother using a generator to produce output data because
- # a) we only have 40 bytes per head and even esoteric numbers of heads
- # consume little memory (1M heads is 40MB) b) we don't want to send the
- # part if we don't have entries and knowing if we have entries requires
- # cache lookups.
- for node in outgoing.missingheads:
- # Don't compute missing, as this may slow down serving.
- fnode = cache.getfnode(node, computemissing=False)
- if fnode is not None:
- chunks.extend([node, fnode])
-
- if chunks:
- bundler.newpart('hgtagsfnodes', data=''.join(chunks))
+ bundle2.addparttagsfnodescache(repo, bundler, outgoing)
def _getbookmarks(repo, **kwargs):
"""Returns bookmark to node mapping.
@@ -1741,10 +1732,11 @@
# 'check_heads' call wil be a no-op
check_heads(repo, heads, 'uploading changes')
# push can proceed
- if not util.safehasattr(cg, 'params'):
+ if not isinstance(cg, bundle2.unbundle20):
# legacy case: bundle1 (changegroup 01)
- lockandtr[1] = repo.lock()
- r = cg.apply(repo, source, url)
+ txnname = "\n".join([source, util.hidepassword(url)])
+ with repo.lock(), repo.transaction(txnname) as tr:
+ r = cg.apply(repo, tr, source, url)
else:
r = None
try:
@@ -1999,29 +1991,21 @@
def trypullbundlefromurl(ui, repo, url):
"""Attempt to apply a bundle from a URL."""
- lock = repo.lock()
- try:
- tr = repo.transaction('bundleurl')
+ with repo.lock(), repo.transaction('bundleurl') as tr:
try:
- try:
- fh = urlmod.open(ui, url)
- cg = readbundle(ui, fh, 'stream')
+ fh = urlmod.open(ui, url)
+ cg = readbundle(ui, fh, 'stream')
- if isinstance(cg, bundle2.unbundle20):
- bundle2.processbundle(repo, cg, lambda: tr)
- elif isinstance(cg, streamclone.streamcloneapplier):
- cg.apply(repo)
- else:
- cg.apply(repo, 'clonebundles', url)
- tr.close()
- return True
- except urlerr.httperror as e:
- ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
- except urlerr.urlerror as e:
- ui.warn(_('error fetching bundle: %s\n') % e.reason)
+ if isinstance(cg, bundle2.unbundle20):
+ bundle2.processbundle(repo, cg, lambda: tr)
+ elif isinstance(cg, streamclone.streamcloneapplier):
+ cg.apply(repo)
+ else:
+ cg.apply(repo, tr, 'clonebundles', url)
+ return True
+ except urlerr.httperror as e:
+ ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
+ except urlerr.urlerror as e:
+ ui.warn(_('error fetching bundle: %s\n') % e.reason)
- return False
- finally:
- tr.release()
- finally:
- lock.release()
+ return False
--- a/mercurial/extensions.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/extensions.py Tue Jun 20 16:33:46 2017 -0400
@@ -28,8 +28,8 @@
_disabledextensions = {}
_aftercallbacks = {}
_order = []
-_builtin = set(['hbisect', 'bookmarks', 'parentrevspec', 'progress', 'interhg',
- 'inotify', 'hgcia'])
+_builtin = {'hbisect', 'bookmarks', 'parentrevspec', 'progress', 'interhg',
+ 'inotify', 'hgcia'}
def extensions(ui=None):
if ui:
@@ -118,6 +118,23 @@
if ui.debugflag:
ui.traceback()
+# attributes set by registrar.command
+_cmdfuncattrs = ('norepo', 'optionalrepo', 'inferrepo')
+
+def _validatecmdtable(ui, cmdtable):
+ """Check if extension commands have required attributes"""
+ for c, e in cmdtable.iteritems():
+ f = e[0]
+ if getattr(f, '_deprecatedregistrar', False):
+ ui.deprecwarn("cmdutil.command is deprecated, use "
+ "registrar.command to register '%s'" % c, '4.6')
+ missing = [a for a in _cmdfuncattrs if not util.safehasattr(f, a)]
+ if not missing:
+ continue
+ raise error.ProgrammingError(
+ 'missing attributes: %s' % ', '.join(missing),
+ hint="use @command decorator to register '%s'" % c)
+
def load(ui, name, path):
if name.startswith('hgext.') or name.startswith('hgext/'):
shortname = name[6:]
@@ -139,6 +156,7 @@
ui.warn(_('(third party extension %s requires version %s or newer '
'of Mercurial; disabling)\n') % (shortname, minver))
return
+ _validatecmdtable(ui, getattr(mod, 'cmdtable', {}))
_extensions[shortname] = mod
_order.append(shortname)
@@ -149,20 +167,36 @@
def _runuisetup(name, ui):
uisetup = getattr(_extensions[name], 'uisetup', None)
if uisetup:
- uisetup(ui)
+ try:
+ uisetup(ui)
+ except Exception as inst:
+ ui.traceback()
+ msg = _forbytes(inst)
+ ui.warn(_("*** failed to set up extension %s: %s\n") % (name, msg))
+ return False
+ return True
def _runextsetup(name, ui):
extsetup = getattr(_extensions[name], 'extsetup', None)
if extsetup:
try:
- extsetup(ui)
- except TypeError:
- if inspect.getargspec(extsetup).args:
- raise
- extsetup() # old extsetup with no ui argument
+ try:
+ extsetup(ui)
+ except TypeError:
+ if inspect.getargspec(extsetup).args:
+ raise
+ extsetup() # old extsetup with no ui argument
+ except Exception as inst:
+ ui.traceback()
+ msg = _forbytes(inst)
+ ui.warn(_("*** failed to set up extension %s: %s\n") % (name, msg))
+ return False
+ return True
-def loadall(ui):
+def loadall(ui, whitelist=None):
result = ui.configitems("extensions")
+ if whitelist is not None:
+ result = [(k, v) for (k, v) in result if k in whitelist]
newindex = len(_order)
for (name, path) in result:
if path:
@@ -171,23 +205,31 @@
continue
try:
load(ui, name, path)
- except KeyboardInterrupt:
- raise
except Exception as inst:
- inst = _forbytes(inst)
+ msg = _forbytes(inst)
if path:
ui.warn(_("*** failed to import extension %s from %s: %s\n")
- % (name, path, inst))
+ % (name, path, msg))
else:
ui.warn(_("*** failed to import extension %s: %s\n")
- % (name, inst))
+ % (name, msg))
+ if isinstance(inst, error.Hint) and inst.hint:
+ ui.warn(_("*** (%s)\n") % inst.hint)
ui.traceback()
+ broken = set()
+ for name in _order[newindex:]:
+ if not _runuisetup(name, ui):
+ broken.add(name)
+
for name in _order[newindex:]:
- _runuisetup(name, ui)
+ if name in broken:
+ continue
+ if not _runextsetup(name, ui):
+ broken.add(name)
- for name in _order[newindex:]:
- _runextsetup(name, ui)
+ for name in broken:
+ _extensions[name] = None
# Call aftercallbacks that were never met.
for shortname in _aftercallbacks:
@@ -288,6 +330,25 @@
table[key] = tuple(newentry)
return entry
+def wrapfilecache(cls, propname, wrapper):
+ """Wraps a filecache property.
+
+ These can't be wrapped using the normal wrapfunction.
+ """
+ assert callable(wrapper)
+ for currcls in cls.__mro__:
+ if propname in currcls.__dict__:
+ origfn = currcls.__dict__[propname].func
+ assert callable(origfn)
+ def wrap(*args, **kwargs):
+ return wrapper(origfn, *args, **kwargs)
+ currcls.__dict__[propname].func = wrap
+ break
+
+ if currcls is object:
+ raise AttributeError(
+ _("type '%s' has no property '%s'") % (cls, propname))
+
def wrapfunction(container, funcname, wrapper):
'''Wrap the function named funcname in container
--- a/mercurial/fancyopts.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/fancyopts.py Tue Jun 20 16:33:46 2017 -0400
@@ -14,7 +14,7 @@
)
# Set of flags to not apply boolean negation logic on
-nevernegate = set([
+nevernegate = {
# avoid --no-noninteractive
'noninteractive',
# These two flags are special because they cause hg to do one
@@ -22,7 +22,7 @@
# like aliases anyway.
'help',
'version',
- ])
+}
def gnugetopt(args, options, longoptions):
"""Parse options mostly like getopt.gnu_getopt.
--- a/mercurial/filelog.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/filelog.py Tue Jun 20 16:33:46 2017 -0400
@@ -18,7 +18,7 @@
_mdre = re.compile('\1\n')
def parsemeta(text):
- """return (metadatadict, keylist, metadatasize)"""
+ """return (metadatadict, metadatasize)"""
# text can be buffer, so we can't use .startswith or .index
if text[:2] != '\1\n':
return None, None
--- a/mercurial/filemerge.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/filemerge.py Tue Jun 20 16:33:46 2017 -0400
@@ -49,6 +49,17 @@
mergeonly = 'mergeonly' # just the full merge, no premerge
fullmerge = 'fullmerge' # both premerge and merge
+_localchangedotherdeletedmsg = _(
+ "local%(l)s changed %(fd)s which other%(o)s deleted\n"
+ "use (c)hanged version, (d)elete, or leave (u)nresolved?"
+ "$$ &Changed $$ &Delete $$ &Unresolved")
+
+_otherchangedlocaldeletedmsg = _(
+ "other%(o)s changed %(fd)s which local%(l)s deleted\n"
+ "use (c)hanged version, leave (d)eleted, or "
+ "leave (u)nresolved?"
+ "$$ &Changed $$ &Deleted $$ &Unresolved")
+
class absentfilectx(object):
"""Represents a file that's ostensibly in a context but is actually not
present in it.
@@ -133,7 +144,7 @@
def check(tool, pat, symlink, binary, changedelete):
tmsg = tool
if pat:
- tmsg += " specified for " + pat
+ tmsg = _("%s (for pattern %s)") % (tool, pat)
if not _findtool(ui, tool):
if pat: # explicitly requested tool deserves a warning
ui.warn(_("couldn't find merge tool %s\n") % tmsg)
@@ -209,6 +220,9 @@
# internal merge or prompt as last resort
if symlink or binary or changedelete:
+ if not changedelete and len(tools):
+ # any tool is rejected by capability for symlink or binary
+ ui.warn(_("no tool found to merge %s\n") % path)
return ":prompt", None
return ":merge", None
@@ -247,21 +261,16 @@
try:
if fco.isabsent():
index = ui.promptchoice(
- _("local%(l)s changed %(fd)s which other%(o)s deleted\n"
- "use (c)hanged version, (d)elete, or leave (u)nresolved?"
- "$$ &Changed $$ &Delete $$ &Unresolved") % prompts, 2)
+ _localchangedotherdeletedmsg % prompts, 2)
choice = ['local', 'other', 'unresolved'][index]
elif fcd.isabsent():
index = ui.promptchoice(
- _("other%(o)s changed %(fd)s which local%(l)s deleted\n"
- "use (c)hanged version, leave (d)eleted, or "
- "leave (u)nresolved?"
- "$$ &Changed $$ &Deleted $$ &Unresolved") % prompts, 2)
+ _otherchangedlocaldeletedmsg % prompts, 2)
choice = ['other', 'local', 'unresolved'][index]
else:
index = ui.promptchoice(
- _("no tool found to merge %(fd)s\n"
- "keep (l)ocal%(l)s, take (o)ther%(o)s, or leave (u)nresolved?"
+ _("keep (l)ocal%(l)s, take (o)ther%(o)s, or leave (u)nresolved"
+ " for %(fd)s?"
"$$ &Local $$ &Other $$ &Unresolved") % prompts, 2)
choice = ['local', 'other', 'unresolved'][index]
@@ -455,7 +464,11 @@
perform a merge manually. If the file to be merged is named
``a.txt``, these files will accordingly be named ``a.txt.local``,
``a.txt.other`` and ``a.txt.base`` and they will be placed in the
- same directory as ``a.txt``."""
+ same directory as ``a.txt``.
+
+ This implies premerge. Therefore, files aren't dumped if premerge
+ runs successfully. Use :forcedump to forcibly write files out.
+ """
a, b, c, back = files
fd = fcd.path()
@@ -465,6 +478,15 @@
repo.wwrite(fd + ".base", fca.data(), fca.flags())
return False, 1, False
+@internaltool('forcedump', mergeonly)
+def _forcedump(repo, mynode, orig, fcd, fco, fca, toolconf, files,
+ labels=None):
+ """
+ Creates three versions of the files the same as :dump, but omits premerge.
+ """
+ return _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files,
+ labels=labels)
+
def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
tool, toolpath, binary, symlink = toolconf
if fcd.isabsent() or fco.isabsent():
@@ -512,10 +534,10 @@
props['templ'] = template
props['ctx'] = ctx
props['repo'] = repo
- templateresult = template('conflictmarker', **props)
+ templateresult = template.render(props)
label = ('%s:' % label).ljust(pad + 1)
- mark = '%s %s' % (label, templater.stringify(templateresult))
+ mark = '%s %s' % (label, templateresult)
if mark:
mark = mark.splitlines()[0] # split for safety
@@ -544,7 +566,7 @@
ui = repo.ui
template = ui.config('ui', 'mergemarkertemplate', _defaultconflictmarker)
template = templater.unquotestring(template)
- tmpl = formatter.maketemplater(ui, 'conflictmarker', template)
+ tmpl = formatter.maketemplater(ui, template)
pad = max(len(l) for l in labels)
@@ -606,7 +628,8 @@
# normalize to new-style names (':merge' etc)
tool = tool[len('internal'):]
ui.debug("picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n"
- % (tool, fd, binary, symlink, changedelete))
+ % (tool, fd, pycompat.bytestr(binary), pycompat.bytestr(symlink),
+ pycompat.bytestr(changedelete)))
if tool in internals:
func = internals[tool]
--- a/mercurial/fileset.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/fileset.py Tue Jun 20 16:33:46 2017 -0400
@@ -14,6 +14,7 @@
error,
merge,
parser,
+ pycompat,
registrar,
scmutil,
util,
@@ -37,12 +38,13 @@
"end": (0, None, None, None, None),
}
-keywords = set(['and', 'or', 'not'])
+keywords = {'and', 'or', 'not'}
globchars = ".*{}[]?/\\_"
def tokenize(program):
pos, l = 0, len(program)
+ program = pycompat.bytestr(program)
while pos < l:
c = program[pos]
if c.isspace(): # skip inter-token whitespace
@@ -256,7 +258,7 @@
"""
# i18n: "binary" is a keyword
getargs(x, 0, 0, _("binary takes no arguments"))
- return [f for f in mctx.existing() if util.binary(mctx.ctx[f].data())]
+ return [f for f in mctx.existing() if mctx.ctx[f].isbinary()]
@predicate('exec()', callexisting=True)
def exec_(mctx, x):
--- a/mercurial/formatter.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/formatter.py Tue Jun 20 16:33:46 2017 -0400
@@ -103,6 +103,8 @@
from __future__ import absolute_import
+import collections
+import contextlib
import itertools
import os
@@ -114,6 +116,7 @@
from . import (
error,
+ pycompat,
templatefilters,
templatekw,
templater,
@@ -178,6 +181,7 @@
pass
def data(self, **data):
'''insert data into item that's not shown in default output'''
+ data = pycompat.byteskwargs(data)
self._item.update(data)
def write(self, fields, deftext, *fielddata, **opts):
'''do default text output while assigning data to item'''
@@ -204,6 +208,10 @@
if self._item is not None:
self._showitem()
+def nullformatter(ui, topic):
+ '''formatter that prints nothing'''
+ return baseformatter(ui, topic, opts={}, converter=_nullconverter)
+
class _nestedformatter(baseformatter):
'''build sub items and store them in the parent formatter'''
def __init__(self, ui, converter, data):
@@ -235,24 +243,28 @@
class plainformatter(baseformatter):
'''the default text output scheme'''
- def __init__(self, ui, topic, opts):
+ def __init__(self, ui, out, topic, opts):
baseformatter.__init__(self, ui, topic, opts, _plainconverter)
if ui.debugflag:
self.hexfunc = hex
else:
self.hexfunc = short
+ if ui is out:
+ self._write = ui.write
+ else:
+ self._write = lambda s, **opts: out.write(s)
def startitem(self):
pass
def data(self, **data):
pass
def write(self, fields, deftext, *fielddata, **opts):
- self._ui.write(deftext % fielddata, **opts)
+ self._write(deftext % fielddata, **opts)
def condwrite(self, cond, fields, deftext, *fielddata, **opts):
'''do conditional write'''
if cond:
- self._ui.write(deftext % fielddata, **opts)
+ self._write(deftext % fielddata, **opts)
def plain(self, text, **opts):
- self._ui.write(text, **opts)
+ self._write(text, **opts)
def isplain(self):
return True
def nested(self, field):
@@ -335,13 +347,14 @@
def __init__(self, ui, out, topic, opts):
baseformatter.__init__(self, ui, topic, opts, _templateconverter)
self._out = out
- self._topic = topic
- self._t = gettemplater(ui, topic, opts.get('template', ''),
- cache=templatekw.defaulttempl)
+ spec = lookuptemplate(ui, topic, opts.get('template', ''))
+ self._tref = spec.ref
+ self._t = loadtemplater(ui, spec, cache=templatekw.defaulttempl)
self._counter = itertools.count()
self._cache = {} # for templatekw/funcs to store reusable data
def context(self, **ctxs):
'''insert context objects to be used to render template keywords'''
+ ctxs = pycompat.byteskwargs(ctxs)
assert all(k == 'ctx' for k in ctxs)
self._item.update(ctxs)
def _showitem(self):
@@ -359,25 +372,46 @@
props['templ'] = self._t
props['repo'] = props['ctx'].repo()
props['revcache'] = {}
- g = self._t(self._topic, ui=self._ui, cache=self._cache, **props)
+ props = pycompat.strkwargs(props)
+ g = self._t(self._tref, ui=self._ui, cache=self._cache, **props)
self._out.write(templater.stringify(g))
+templatespec = collections.namedtuple(r'templatespec',
+ r'ref tmpl mapfile')
+
def lookuptemplate(ui, topic, tmpl):
+ """Find the template matching the given -T/--template spec 'tmpl'
+
+ 'tmpl' can be any of the following:
+
+ - a literal template (e.g. '{rev}')
+ - a map-file name or path (e.g. 'changelog')
+ - a reference to [templates] in config file
+ - a path to raw template file
+
+ A map file defines a stand-alone template environment. If a map file
+ is selected, all templates defined in the file will be loaded, and the
+ template matching the given topic will be rendered. No aliases will be
+ loaded from user config.
+
+ If no map file is selected, all templates in [templates] section will be
+ available as well as aliases in [templatealias].
+ """
+
# looks like a literal template?
if '{' in tmpl:
- return tmpl, None
+ return templatespec('', tmpl, None)
# perhaps a stock style?
if not os.path.split(tmpl)[0]:
mapname = (templater.templatepath('map-cmdline.' + tmpl)
or templater.templatepath(tmpl))
if mapname and os.path.isfile(mapname):
- return None, mapname
+ return templatespec(topic, None, mapname)
# perhaps it's a reference to [templates]
- t = ui.config('templates', tmpl)
- if t:
- return templater.unquotestring(t), None
+ if ui.config('templates', tmpl):
+ return templatespec(tmpl, None, None)
if tmpl == 'list':
ui.write(_("available styles: %s\n") % templater.stylelist())
@@ -387,42 +421,72 @@
if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
# is it a mapfile for a style?
if os.path.basename(tmpl).startswith("map-"):
- return None, os.path.realpath(tmpl)
- tmpl = open(tmpl).read()
- return tmpl, None
+ return templatespec(topic, None, os.path.realpath(tmpl))
+ with util.posixfile(tmpl, 'rb') as f:
+ tmpl = f.read()
+ return templatespec('', tmpl, None)
# constant string?
- return tmpl, None
+ return templatespec('', tmpl, None)
-def gettemplater(ui, topic, spec, cache=None):
- tmpl, mapfile = lookuptemplate(ui, topic, spec)
- assert not (tmpl and mapfile)
- if mapfile:
- return templater.templater.frommapfile(mapfile, cache=cache)
- return maketemplater(ui, topic, tmpl, cache=cache)
+def loadtemplater(ui, spec, cache=None):
+ """Create a templater from either a literal template or loading from
+ a map file"""
+ assert not (spec.tmpl and spec.mapfile)
+ if spec.mapfile:
+ return templater.templater.frommapfile(spec.mapfile, cache=cache)
+ return maketemplater(ui, spec.tmpl, cache=cache)
-def maketemplater(ui, topic, tmpl, cache=None):
+def maketemplater(ui, tmpl, cache=None):
"""Create a templater from a string template 'tmpl'"""
aliases = ui.configitems('templatealias')
t = templater.templater(cache=cache, aliases=aliases)
+ t.cache.update((k, templater.unquotestring(v))
+ for k, v in ui.configitems('templates'))
if tmpl:
- t.cache[topic] = tmpl
+ t.cache[''] = tmpl
return t
-def formatter(ui, topic, opts):
+def formatter(ui, out, topic, opts):
template = opts.get("template", "")
if template == "json":
- return jsonformatter(ui, ui, topic, opts)
+ return jsonformatter(ui, out, topic, opts)
elif template == "pickle":
- return pickleformatter(ui, ui, topic, opts)
+ return pickleformatter(ui, out, topic, opts)
elif template == "debug":
- return debugformatter(ui, ui, topic, opts)
+ return debugformatter(ui, out, topic, opts)
elif template != "":
- return templateformatter(ui, ui, topic, opts)
+ return templateformatter(ui, out, topic, opts)
# developer config: ui.formatdebug
elif ui.configbool('ui', 'formatdebug'):
- return debugformatter(ui, ui, topic, opts)
+ return debugformatter(ui, out, topic, opts)
# deprecated config: ui.formatjson
elif ui.configbool('ui', 'formatjson'):
- return jsonformatter(ui, ui, topic, opts)
- return plainformatter(ui, topic, opts)
+ return jsonformatter(ui, out, topic, opts)
+ return plainformatter(ui, out, topic, opts)
+
+@contextlib.contextmanager
+def openformatter(ui, filename, topic, opts):
+ """Create a formatter that writes outputs to the specified file
+
+ Must be invoked using the 'with' statement.
+ """
+ with util.posixfile(filename, 'wb') as out:
+ with formatter(ui, out, topic, opts) as fm:
+ yield fm
+
+@contextlib.contextmanager
+def _neverending(fm):
+ yield fm
+
+def maybereopen(fm, filename, opts):
+ """Create a formatter backed by file if filename specified, else return
+ the given formatter
+
+ Must be invoked using the 'with' statement. This will never call fm.end()
+ of the given formatter.
+ """
+ if filename:
+ return openformatter(fm._ui, filename, fm._topic, opts)
+ else:
+ return _neverending(fm)
--- a/mercurial/graphmod.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/graphmod.py Tue Jun 20 16:33:46 2017 -0400
@@ -21,7 +21,7 @@
from .node import nullrev
from . import (
- revset,
+ dagop,
smartset,
util,
)
@@ -70,7 +70,7 @@
# through all revs (issue4782)
if not isinstance(revs, smartset.baseset):
revs = smartset.baseset(revs)
- gp = gpcache[mpar] = sorted(set(revset.reachableroots(
+ gp = gpcache[mpar] = sorted(set(dagop.reachableroots(
repo, revs, [mpar])))
if not gp:
parents.append((MISSINGPARENT, mpar))
@@ -273,7 +273,7 @@
# | | | | | |
line.extend(echars[idx * 2:(idx + 1) * 2])
else:
- line.extend(' ')
+ line.extend([' ', ' '])
# all edges to the right of the current node
remainder = ncols - idx - 1
if remainder > 0:
@@ -410,14 +410,17 @@
# shift_interline is the line containing the non-vertical
# edges between this entry and the next
shift_interline = echars[:idx * 2]
- shift_interline.extend(' ' * (2 + coldiff))
+ for i in xrange(2 + coldiff):
+ shift_interline.append(' ')
count = ncols - idx - 1
if coldiff == -1:
- shift_interline.extend('/ ' * count)
+ for i in xrange(count):
+ shift_interline.extend(['/', ' '])
elif coldiff == 0:
shift_interline.extend(echars[(idx + 1) * 2:ncols * 2])
else:
- shift_interline.extend(r'\ ' * count)
+ for i in xrange(count):
+ shift_interline.extend(['\\', ' '])
# draw edges from the current node to its parents
_drawedges(echars, edges, nodeline, shift_interline)
--- a/mercurial/help.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/help.py Tue Jun 20 16:33:46 2017 -0400
@@ -23,6 +23,7 @@
filemerge,
fileset,
minirst,
+ pycompat,
revset,
templatefilters,
templatekw,
@@ -33,7 +34,7 @@
webcommands,
)
-_exclkeywords = set([
+_exclkeywords = {
"(ADVANCED)",
"(DEPRECATED)",
"(EXPERIMENTAL)",
@@ -43,7 +44,7 @@
_("(DEPRECATED)"),
# i18n: "(EXPERIMENTAL)" is a keyword, must be translated consistently
_("(EXPERIMENTAL)"),
- ])
+}
def listexts(header, exts, indent=1, showdeprecated=False):
'''return a text listing of the given extensions'''
@@ -83,7 +84,11 @@
so = '-' + shortopt
lo = '--' + longopt
if default:
- desc += _(" (default: %s)") % default
+ # default is of unknown type, and in Python 2 we abused
+ # the %s-shows-repr property to handle integers etc. To
+ # match that behavior on Python 3, we do str(default) and
+ # then convert it to bytes.
+ desc += _(" (default: %s)") % pycompat.bytestr(default)
if isinstance(default, list):
lo += " %s [+]" % optlabel
@@ -113,7 +118,7 @@
return True
return False
-def topicmatch(ui, kw):
+def topicmatch(ui, commands, kw):
"""Return help topics matching kw.
Returns {'section': [(name, summary), ...], ...} where section is
@@ -133,14 +138,13 @@
or lowercontains(header)
or (callable(doc) and lowercontains(doc(ui)))):
results['topics'].append((names[0], header))
- from . import commands # avoid cycle
for cmd, entry in commands.table.iteritems():
if len(entry) == 3:
summary = entry[2]
else:
summary = ''
# translate docs *before* searching there
- docs = _(getattr(entry[0], '__doc__', None)) or ''
+ docs = _(pycompat.getdoc(entry[0])) or ''
if kw in cmd or lowercontains(summary) or lowercontains(docs):
doclines = docs.splitlines()
if doclines:
@@ -162,8 +166,9 @@
for cmd, entry in getattr(mod, 'cmdtable', {}).iteritems():
if kw in cmd or (len(entry) > 2 and lowercontains(entry[2])):
cmdname = cmd.partition('|')[0].lstrip('^')
- if entry[0].__doc__:
- cmddoc = gettext(entry[0].__doc__).splitlines()[0]
+ cmddoc = pycompat.getdoc(entry[0])
+ if cmddoc:
+ cmddoc = gettext(cmddoc).splitlines()[0]
else:
cmddoc = _('(no help text available)')
if filtercmd(ui, cmdname, kw, cmddoc):
@@ -259,13 +264,14 @@
"""
entries = []
for name in sorted(items):
- text = (items[name].__doc__ or '').rstrip()
+ text = (pycompat.getdoc(items[name]) or '').rstrip()
if (not text
or not ui.verbose and any(w in text for w in _exclkeywords)):
continue
text = gettext(text)
if dedent:
- text = textwrap.dedent(text)
+ # Abuse latin1 to use textwrap.dedent() on bytes.
+ text = textwrap.dedent(text.decode('latin1')).encode('latin1')
lines = text.splitlines()
doclines = [(lines[0])]
for l in lines[1:]:
@@ -297,13 +303,14 @@
addtopicsymbols('hgweb', '.. webcommandsmarker', webcommands.commands,
dedent=True)
-def help_(ui, name, unknowncmd=False, full=True, subtopic=None, **opts):
+def help_(ui, commands, name, unknowncmd=False, full=True, subtopic=None,
+ **opts):
'''
Generate the help for 'name' as unformatted restructured text. If
'name' is None, describe the commands available.
'''
- from . import commands # avoid cycle
+ opts = pycompat.byteskwargs(opts)
def helpcmd(name, subtopic=None):
try:
@@ -343,7 +350,7 @@
rst.append('\n')
# description
- doc = gettext(entry[0].__doc__)
+ doc = gettext(pycompat.getdoc(entry[0]))
if not doc:
doc = _("(no help text available)")
if util.safehasattr(entry[0], 'definition'): # aliased command
@@ -365,7 +372,7 @@
# extension help text
try:
mod = extensions.find(name)
- doc = gettext(mod.__doc__) or ''
+ doc = gettext(pycompat.getdoc(mod)) or ''
if '\n' in doc.strip():
msg = _("(use 'hg help -e %s' to show help for "
"the %s extension)") % (name, name)
@@ -413,7 +420,7 @@
if name == "shortlist" and not f.startswith("^"):
continue
f = f.lstrip("^")
- doc = e[0].__doc__
+ doc = pycompat.getdoc(e[0])
if filtercmd(ui, f, name, doc):
continue
doc = gettext(doc)
@@ -516,7 +523,7 @@
def helpext(name, subtopic=None):
try:
mod = extensions.find(name)
- doc = gettext(mod.__doc__) or _('no help text available')
+ doc = gettext(pycompat.getdoc(mod)) or _('no help text available')
except KeyError:
mod = None
doc = extensions.disabledext(name)
@@ -552,7 +559,7 @@
def helpextcmd(name, subtopic=None):
cmd, ext, mod = extensions.disabledcmd(ui, name,
ui.configbool('ui', 'strict'))
- doc = gettext(mod.__doc__).splitlines()[0]
+ doc = gettext(pycompat.getdoc(mod)).splitlines()[0]
rst = listexts(_("'%s' is provided by the following "
"extension:") % cmd, {ext: doc}, indent=4,
@@ -566,7 +573,7 @@
rst = []
kw = opts.get('keyword')
if kw or name is None and any(opts[o] for o in opts):
- matches = topicmatch(ui, name or '')
+ matches = topicmatch(ui, commands, name or '')
helpareas = []
if opts.get('extension'):
helpareas += [('extensions', _('Extensions'))]
@@ -613,11 +620,12 @@
# program name
if not ui.quiet:
rst = [_("Mercurial Distributed SCM\n"), '\n']
- rst.extend(helplist(None, **opts))
+ rst.extend(helplist(None, **pycompat.strkwargs(opts)))
return ''.join(rst)
-def formattedhelp(ui, name, keep=None, unknowncmd=False, full=True, **opts):
+def formattedhelp(ui, commands, name, keep=None, unknowncmd=False, full=True,
+ **opts):
"""get help for a given topic (as a dotted name) as rendered rst
Either returns the rendered help text or raises an exception.
@@ -643,7 +651,7 @@
termwidth = ui.termwidth() - 2
if textwidth <= 0 or termwidth < textwidth:
textwidth = termwidth
- text = help_(ui, name,
+ text = help_(ui, commands, name,
subtopic=subtopic, unknowncmd=unknowncmd, full=full, **opts)
formatted, pruned = minirst.format(text, textwidth, keep=keep,
--- a/mercurial/help/color.txt Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/help/color.txt Tue Jun 20 16:33:46 2017 -0400
@@ -22,7 +22,8 @@
The default pager on Windows does not support color, so enabling the pager
will effectively disable color. See :hg:`help config.ui.paginate` to disable
the pager. Alternately, MSYS and Cygwin shells provide `less` as a pager,
- which can be configured to support ANSI color mode.
+ which can be configured to support ANSI color mode. Windows 10 natively
+ supports ANSI color mode.
Mode
====
--- a/mercurial/help/config.txt Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/help/config.txt Tue Jun 20 16:33:46 2017 -0400
@@ -415,7 +415,7 @@
``mode``
String: control the method used to output color. One of ``auto``, ``ansi``,
``win32``, ``terminfo`` or ``debug``. In auto mode, Mercurial will
- use ANSI mode by default (or win32 mode on Windows) if it detects a
+ use ANSI mode by default (or win32 mode prior to Windows 10) if it detects a
terminal. Any invalid value will disable color.
``pagermode``
@@ -1132,6 +1132,7 @@
A list of hashes of the DER encoded peer/remote certificate. Values have
the form ``algorithm``:``fingerprint``. e.g.
``sha256:c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2``.
+ In addition, colons (``:``) can appear in the fingerprint part.
The following algorithms/prefixes are supported: ``sha1``, ``sha256``,
``sha512``.
@@ -1181,6 +1182,7 @@
[hostsecurity]
hg.example.com:fingerprints = sha256:c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2
hg2.example.com:fingerprints = sha1:914f1aff87249c09b6859b88b1906d30756491ca, sha1:fc:e2:8d:d9:51:cd:cb:c1:4d:18:6b:b7:44:8d:49:72:57:e6:cd:33
+ hg3.example.com:fingerprints = sha256:9a:b0:dc:e2:75:ad:8a:b7:84:58:e5:1f:07:32:f1:87:e6:bd:24:22:af:b7:ce:8e:9c:b4:10:cf:b9:f4:0e:d2
foo.example.com:verifycertsfile = /etc/ssl/trusted-ca-certs.pem
To change the default minimum protocol version to TLS 1.2 but to allow TLS 1.1
@@ -1571,6 +1573,28 @@
Specific to the ``ls`` instrumenting profiler.
(default: 5)
+``showmin``
+ Minimum fraction of samples an entry must have for it to be displayed.
+ Can be specified as a float between ``0.0`` and ``1.0`` or can have a
+ ``%`` afterwards to allow values up to ``100``. e.g. ``5%``.
+
+ Only used by the ``stat`` profiler.
+
+ For the ``hotpath`` format, default is ``0.05``.
+ For the ``chrome`` format, default is ``0.005``.
+
+ The option is unused on other formats.
+
+``showmax``
+ Maximum fraction of samples an entry can have before it is ignored in
+ display. Values format is the same as ``showmin``.
+
+ Only used by the ``stat`` profiler.
+
+ For the ``chrome`` format, default is ``0.999``.
+
+ The option is unused on other formats.
+
``progress``
------------
@@ -1656,10 +1680,31 @@
the write lock while determining what data to transfer.
(default: True)
+``uncompressedallowsecret``
+ Whether to allow stream clones when the repository contains secret
+ changesets. (default: False)
+
``preferuncompressed``
When set, clients will try to use the uncompressed streaming
protocol. (default: False)
+``disablefullbundle``
+ When set, servers will refuse attempts to do pull-based clones.
+ If this option is set, ``preferuncompressed`` and/or clone bundles
+ are highly recommended. Partial clones will still be allowed.
+ (default: False)
+
+``concurrent-push-mode``
+ Level of allowed race condition between two pushing clients.
+
+ - 'strict': push is aborted if another client touched the repository
+ while the push was preparing. (default)
+ - 'check-related': push is only aborted if it affects a head that was also
+ affected while the push was preparing.
+
+ This requires a compatible client (version 4.3 and later). Old clients will
+ use 'strict'.
+
``validate``
Whether to validate the completeness of pushed changesets by
checking that all new file revisions specified in manifests are
@@ -2059,6 +2104,15 @@
on all exceptions, even those recognized by Mercurial (such as
IOError or MemoryError). (default: False)
+``tweakdefaults``
+
+ By default Mercurial's behavior changes very little from release
+ to release, but over time the recommended config settings
+ shift. Enable this config to opt in to get automatic tweaks to
+ Mercurial's behavior over time. This config setting will have no
+ effect if ``HGPLAIN`` is set or ``HGPLAINEXCEPT`` is set and does
+ not include ``tweakdefaults``. (default: False)
+
``username``
The committer of a changeset created when running "commit".
Typically a person's name and email address, e.g. ``Fred Widget
--- a/mercurial/help/internals/revlogs.txt Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/help/internals/revlogs.txt Tue Jun 20 16:33:46 2017 -0400
@@ -45,6 +45,12 @@
1
RevlogNG (*next generation*). It replaced version 0 when it was
implemented in 2006.
+2
+ In-development version incorporating accumulated knowledge and
+ missing features from 10+ years of revlog version 1.
+57005 (0xdead)
+ Reserved for internal testing of new versions. No defined format
+ beyond 32-bit header.
The feature flags short consists of bit flags. Where 0 is the least
significant bit, the following bit offsets define flags:
@@ -60,23 +66,23 @@
The following header values are common:
00 00 00 01
- RevlogNG
+ v1
00 01 00 01
- RevlogNG + inline
+ v1 + inline
00 02 00 01
- RevlogNG + generaldelta
+ v1 + generaldelta
00 03 00 01
- RevlogNG + inline + generaldelta
+ v1 + inline + generaldelta
Following the 32-bit header is the remainder of the first index entry.
Following that are remaining *index* data. Inlined revision data is
possibly located between index entries. More on this layout is described
below.
-RevlogNG Format
-===============
+Version 1 Format
+================
-RevlogNG (version 1) begins with an index describing the revisions in
+Version 1 (RevlogNG) begins with an index describing the revisions in
the revlog. If the ``inline`` flag is set, revision data is stored inline,
or between index entries (as opposed to in a separate container).
@@ -142,6 +148,14 @@
The first 4 bytes of the revlog are shared between the revlog header
and the 6 byte absolute offset field from the first revlog entry.
+Version 2 Format
+================
+
+(In development. Format not finalized or stable.)
+
+Version 2 is currently identical to version 1. This will obviously
+change.
+
Delta Chains
============
--- a/mercurial/help/internals/wireprotocol.txt Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/help/internals/wireprotocol.txt Tue Jun 20 16:33:46 2017 -0400
@@ -632,6 +632,9 @@
branches
--------
+(Legacy command used for discovery in old clients. Clients with ``getbundle``
+use the ``known`` and ``heads`` commands instead.)
+
Obtain ancestor changesets of specific nodes back to a branch point.
Despite the name, this command has nothing to do with Mercurial named branches.
--- a/mercurial/help/revisions.txt Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/help/revisions.txt Tue Jun 20 16:33:46 2017 -0400
@@ -97,6 +97,7 @@
``x~n``
The nth first ancestor of x; ``x~0`` is x; ``x~3`` is ``x^^^``.
+ For n < 0, the nth unambiguous descendant of x.
``x ## y``
Concatenate strings and identifiers into one string.
--- a/mercurial/help/templates.txt Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/help/templates.txt Tue Jun 20 16:33:46 2017 -0400
@@ -109,6 +109,14 @@
$ hg log -r . -Tnodedate
+A template defined in ``templates`` section can also be referenced from
+another template::
+
+ $ hg log -r . -T "{rev} {nodedate}"
+
+but be aware that the keywords cannot be overridden by templates. For example,
+a template defined as ``templates.rev`` cannot be referenced as ``{rev}``.
+
Examples
========
--- a/mercurial/hg.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/hg.py Tue Jun 20 16:33:46 2017 -0400
@@ -148,10 +148,12 @@
# a list of (ui, repo) functions called for wire peer initialization
wirepeersetupfuncs = []
-def _peerorrepo(ui, path, create=False):
+def _peerorrepo(ui, path, create=False, presetupfuncs=None):
"""return a repository object for the specified path"""
obj = _peerlookup(path).instance(ui, path, create)
ui = getattr(obj, "ui", ui)
+ for f in presetupfuncs or []:
+ f(ui, obj)
for name, module in extensions.extensions(ui):
hook = getattr(module, 'reposetup', None)
if hook:
@@ -161,9 +163,9 @@
f(ui, obj)
return obj
-def repository(ui, path='', create=False):
+def repository(ui, path='', create=False, presetupfuncs=None):
"""return a repository object for the specified path"""
- peer = _peerorrepo(ui, path, create)
+ peer = _peerorrepo(ui, path, create, presetupfuncs=presetupfuncs)
repo = peer.local()
if not repo:
raise error.Abort(_("repository '%s' is not local") %
@@ -407,6 +409,29 @@
return srcpeer, peer(ui, peeropts, dest)
+# Recomputing branch cache might be slow on big repos,
+# so just copy it
+def _copycache(srcrepo, dstcachedir, fname):
+ """copy a cache from srcrepo to destcachedir (if it exists)"""
+ srcbranchcache = srcrepo.vfs.join('cache/%s' % fname)
+ dstbranchcache = os.path.join(dstcachedir, fname)
+ if os.path.exists(srcbranchcache):
+ if not os.path.exists(dstcachedir):
+ os.mkdir(dstcachedir)
+ util.copyfile(srcbranchcache, dstbranchcache)
+
+def _cachetocopy(srcrepo):
+ """return the list of cache file valuable to copy during a clone"""
+ # In local clones we're copying all nodes, not just served
+ # ones. Therefore copy all branch caches over.
+ cachefiles = ['branch2']
+ cachefiles += ['branch2-%s' % f for f in repoview.filtertable]
+ cachefiles += ['rbc-names-v1', 'rbc-revs-v1']
+ cachefiles += ['tags2']
+ cachefiles += ['tags2-%s' % f for f in repoview.filtertable]
+ cachefiles += ['hgtagsfnodes1']
+ return cachefiles
+
def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
update=True, stream=False, branch=None, shareopts=None):
"""Make a copy of an existing repository.
@@ -564,22 +589,9 @@
if os.path.exists(srcbookmarks):
util.copyfile(srcbookmarks, dstbookmarks)
- # Recomputing branch cache might be slow on big repos,
- # so just copy it
- def copybranchcache(fname):
- srcbranchcache = srcrepo.vfs.join('cache/%s' % fname)
- dstbranchcache = os.path.join(dstcachedir, fname)
- if os.path.exists(srcbranchcache):
- if not os.path.exists(dstcachedir):
- os.mkdir(dstcachedir)
- util.copyfile(srcbranchcache, dstbranchcache)
-
dstcachedir = os.path.join(destpath, 'cache')
- # In local clones we're copying all nodes, not just served
- # ones. Therefore copy all branch caches over.
- copybranchcache('branch2')
- for cachename in repoview.filtertable:
- copybranchcache('branch2-%s' % cachename)
+ for cache in _cachetocopy(srcrepo):
+ _copycache(srcrepo, dstcachedir, cache)
# we need to re-init the repo after manually copying the data
# into it
@@ -869,7 +881,7 @@
revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
other = peer(repo, opts, dest)
- outgoing = discovery.findcommonoutgoing(repo.unfiltered(), other, revs,
+ outgoing = discovery.findcommonoutgoing(repo, other, revs,
force=opts.get('force'))
o = outgoing.missing
if not o:
--- a/mercurial/hgweb/hgweb_mod.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/hgweb/hgweb_mod.py Tue Jun 20 16:33:46 2017 -0400
@@ -311,7 +311,8 @@
should be using instances of this class as the WSGI application.
"""
with self._obtainrepo() as repo:
- with profiling.maybeprofile(repo.ui):
+ profile = repo.ui.configbool('profiling', 'enabled')
+ with profiling.profile(repo.ui, enabled=profile):
for r in self._runwsgi(req, repo):
yield r
--- a/mercurial/hgweb/hgwebdir_mod.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/hgweb/hgwebdir_mod.py Tue Jun 20 16:33:46 2017 -0400
@@ -220,7 +220,8 @@
return False
def run_wsgi(self, req):
- with profiling.maybeprofile(self.ui):
+ profile = self.ui.configbool('profiling', 'enabled')
+ with profiling.profile(self.ui, enabled=profile):
for r in self._runwsgi(req):
yield r
--- a/mercurial/hgweb/webcommands.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/hgweb/webcommands.py Tue Jun 20 16:33:46 2017 -0400
@@ -28,7 +28,7 @@
from .. import (
archival,
- context,
+ dagop,
encoding,
error,
graphmod,
@@ -808,7 +808,7 @@
context = parsecontext(web.config('web', 'comparisoncontext', '5'))
def filelines(f):
- if util.binary(f.data()):
+ if f.isbinary():
mt = mimetypes.guess_type(f.path())[0]
if not mt:
mt = 'application/octet-stream'
@@ -886,7 +886,7 @@
yield p
def annotate(**map):
- if util.binary(fctx.data()):
+ if fctx.isbinary():
mt = (mimetypes.guess_type(fctx.path())[0]
or 'application/octet-stream')
lines = [((fctx.filectx(fctx.filerev()), 1), '(binary:%s)' % mt)]
@@ -1013,9 +1013,9 @@
# would required a dedicated "revnav" class
nav = None
if descend:
- it = context.blockdescendants(fctx, *lrange)
+ it = dagop.blockdescendants(fctx, *lrange)
else:
- it = context.blockancestors(fctx, *lrange)
+ it = dagop.blockancestors(fctx, *lrange)
for i, (c, lr) in enumerate(it, 1):
diffs = None
if patch:
@@ -1374,7 +1374,7 @@
subtopic = None
try:
- doc = helpmod.help_(u, topic, subtopic=subtopic)
+ doc = helpmod.help_(u, commands, topic, subtopic=subtopic)
except error.UnknownCommand:
raise ErrorResponse(HTTP_NOT_FOUND)
return tmpl('help', topic=topicname, doc=doc)
--- a/mercurial/hook.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/hook.py Tue Jun 20 16:33:46 2017 -0400
@@ -13,6 +13,7 @@
from .i18n import _
from . import (
demandimport,
+ encoding,
error,
extensions,
pycompat,
@@ -31,7 +32,7 @@
if callable(funcname):
obj = funcname
- funcname = obj.__module__ + "." + obj.__name__
+ funcname = pycompat.sysbytes(obj.__module__ + r"." + obj.__name__)
else:
d = funcname.rfind('.')
if d == -1:
@@ -97,7 +98,7 @@
(hname, exc.args[0]))
else:
ui.warn(_('error: %s hook raised an exception: '
- '%s\n') % (hname, exc))
+ '%s\n') % (hname, encoding.strtolocal(str(exc))))
if throw:
raise
if not ui.tracebackflag:
@@ -204,6 +205,7 @@
return r
def runhooks(ui, repo, htype, hooks, throw=False, **args):
+ args = pycompat.byteskwargs(args)
res = {}
oldstdout = -1
--- a/mercurial/keepalive.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/keepalive.py Tue Jun 20 16:33:46 2017 -0400
@@ -353,7 +353,9 @@
def __init__(self, sock, debuglevel=0, strict=0, method=None):
- httplib.HTTPResponse.__init__(self, sock, debuglevel, method)
+ httplib.HTTPResponse.__init__(self, sock, debuglevel=debuglevel,
+ strict=True, method=method,
+ buffering=True)
self.fileno = sock.fileno
self.code = None
self._rbuf = ''
--- a/mercurial/localrepo.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/localrepo.py Tue Jun 20 16:33:46 2017 -0400
@@ -20,7 +20,6 @@
hex,
nullid,
short,
- wdirrev,
)
from . import (
bookmarks,
@@ -113,9 +112,9 @@
return orig(repo.unfiltered(), *args, **kwargs)
return wrapper
-moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
- 'unbundle'))
-legacycaps = moderncaps.union(set(['changegroupsubset']))
+moderncaps = {'lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
+ 'unbundle'}
+legacycaps = moderncaps.union({'changegroupsubset'})
class localpeer(peer.peerrepository):
'''peer for a local repo; reflects only the most recent API'''
@@ -164,7 +163,7 @@
**kwargs)
cb = util.chunkbuffer(chunks)
- if bundlecaps is not None and 'HG20' in bundlecaps:
+ if exchange.bundle2requested(bundlecaps):
# When requesting a bundle2, getbundle returns a stream to make the
# wire level function happier. We need to build a proper object
# from it in local peer.
@@ -213,9 +212,6 @@
def lock(self):
return self._repo.lock()
- def addchangegroup(self, cg, source, url):
- return cg.apply(self._repo, source, url)
-
def pushkey(self, namespace, key, old, new):
return self._repo.pushkey(namespace, key, old, new)
@@ -245,14 +241,32 @@
def changegroupsubset(self, bases, heads, source):
return changegroup.changegroupsubset(self._repo, bases, heads, source)
+# Increment the sub-version when the revlog v2 format changes to lock out old
+# clients.
+REVLOGV2_REQUIREMENT = 'exp-revlogv2.0'
+
class localrepository(object):
- supportedformats = set(('revlogv1', 'generaldelta', 'treemanifest',
- 'manifestv2'))
- _basesupported = supportedformats | set(('store', 'fncache', 'shared',
- 'relshared', 'dotencode'))
- openerreqs = set(('revlogv1', 'generaldelta', 'treemanifest', 'manifestv2'))
- filtername = None
+ supportedformats = {
+ 'revlogv1',
+ 'generaldelta',
+ 'treemanifest',
+ 'manifestv2',
+ REVLOGV2_REQUIREMENT,
+ }
+ _basesupported = supportedformats | {
+ 'store',
+ 'fncache',
+ 'shared',
+ 'relshared',
+ 'dotencode',
+ }
+ openerreqs = {
+ 'revlogv1',
+ 'generaldelta',
+ 'treemanifest',
+ 'manifestv2',
+ }
# a list of (ui, featureset) functions.
# only functions defined in module of enabled extensions are invoked
@@ -260,6 +274,7 @@
def __init__(self, baseui, path, create=False):
self.requirements = set()
+ self.filtername = None
# wvfs: rooted at the repository root, used to access the working copy
self.wvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
# vfs: rooted at .hg, used to access repo files outside of .hg/store
@@ -382,19 +397,12 @@
# - bookmark changes
self.filteredrevcache = {}
+ # post-dirstate-status hooks
+ self._postdsstatus = []
+
# generic mapping between names and nodes
self.names = namespaces.namespaces()
- @property
- def wopener(self):
- self.ui.deprecwarn("use 'repo.wvfs' instead of 'repo.wopener'", '4.2')
- return self.wvfs
-
- @property
- def opener(self):
- self.ui.deprecwarn("use 'repo.vfs' instead of 'repo.opener'", '4.2')
- return self.vfs
-
def close(self):
self._writecaches()
@@ -437,6 +445,10 @@
if r.startswith('exp-compression-'):
self.svfs.options['compengine'] = r[len('exp-compression-'):]
+ # TODO move "revlogv2" to openerreqs once finalized.
+ if REVLOGV2_REQUIREMENT in self.requirements:
+ self.svfs.options['revlogv2'] = True
+
def _writerequirements(self):
scmutil.writerequires(self.vfs, self.requirements)
@@ -503,14 +515,6 @@
def _activebookmark(self):
return self._bookmarks.active
- def bookmarkheads(self, bookmark):
- name = bookmark.split('@', 1)[0]
- heads = []
- for mark, n in self._bookmarks.iteritems():
- if mark.split('@', 1)[0] == name:
- heads.append(n)
- return heads
-
# _phaserevs and _phasesets depend on changelog. what we need is to
# call _phasecache.invalidate() if '00changelog.i' was changed, but it
# can't be easily expressed in filecache mechanism.
@@ -520,28 +524,12 @@
@storecache('obsstore')
def obsstore(self):
- # read default format for new obsstore.
- # developer config: format.obsstore-version
- defaultformat = self.ui.configint('format', 'obsstore-version', None)
- # rely on obsstore class default when possible.
- kwargs = {}
- if defaultformat is not None:
- kwargs['defaultformat'] = defaultformat
- readonly = not obsolete.isenabled(self, obsolete.createmarkersopt)
- store = obsolete.obsstore(self.svfs, readonly=readonly,
- **kwargs)
- if store and readonly:
- self.ui.warn(
- _('obsolete feature not enabled but %i markers found!\n')
- % len(list(store)))
- return store
+ return obsolete.makestore(self.ui, self)
@storecache('00changelog.i')
def changelog(self):
- c = changelog.changelog(self.svfs)
- if txnutil.mayhavepending(self.root):
- c.readpending('00changelog.i.a')
- return c
+ return changelog.changelog(self.svfs,
+ trypending=txnutil.mayhavepending(self.root))
def _constructmanifest(self):
# This is a temporary function while we migrate from manifest to
@@ -570,15 +558,23 @@
return nullid
def __getitem__(self, changeid):
- if changeid is None or changeid == wdirrev:
+ if changeid is None:
return context.workingctx(self)
if isinstance(changeid, slice):
+ # wdirrev isn't contiguous so the slice shouldn't include it
return [context.changectx(self, i)
for i in xrange(*changeid.indices(len(self)))
if i not in self.changelog.filteredrevs]
- return context.changectx(self, changeid)
+ try:
+ return context.changectx(self, changeid)
+ except error.WdirUnsupported:
+ return context.workingctx(self)
def __contains__(self, changeid):
+ """True if the given changeid exists
+
+ error.LookupError is raised if an ambiguous node is specified.
+ """
try:
self[changeid]
return True
@@ -649,11 +645,6 @@
"""
return hook.hook(self.ui, self, name, throw, **args)
- def tag(self, names, node, message, local, user, date, editor=False):
- self.ui.deprecwarn("use 'tagsmod.tag' instead of 'repo.tag'", '4.2')
- tagsmod.tag(self, names, node, message, local, user, date,
- editor=editor)
-
@filteredpropertycache
def _tagscache(self):
'''Returns a tagscache object that contains various tags related
@@ -841,10 +832,6 @@
return 'store'
return None
- def join(self, f, *insidef):
- self.ui.deprecwarn("use 'repo.vfs.join' instead of 'repo.join'", '4.2')
- return self.vfs.join(os.path.join(f, *insidef))
-
def wjoin(self, f, *insidef):
return self.vfs.reljoin(self.root, f, *insidef)
@@ -857,21 +844,20 @@
return self[changeid]
def setparents(self, p1, p2=nullid):
- self.dirstate.beginparentchange()
- copies = self.dirstate.setparents(p1, p2)
- pctx = self[p1]
- if copies:
- # Adjust copy records, the dirstate cannot do it, it
- # requires access to parents manifests. Preserve them
- # only for entries added to first parent.
- for f in copies:
- if f not in pctx and copies[f] in pctx:
- self.dirstate.copy(copies[f], f)
- if p2 == nullid:
- for f, s in sorted(self.dirstate.copies().items()):
- if f not in pctx and s not in pctx:
- self.dirstate.copy(None, f)
- self.dirstate.endparentchange()
+ with self.dirstate.parentchange():
+ copies = self.dirstate.setparents(p1, p2)
+ pctx = self[p1]
+ if copies:
+ # Adjust copy records, the dirstate cannot do it, it
+ # requires access to parents manifests. Preserve them
+ # only for entries added to first parent.
+ for f in copies:
+ if f not in pctx and copies[f] in pctx:
+ self.dirstate.copy(copies[f], f)
+ if p2 == nullid:
+ for f, s in sorted(self.dirstate.copies().items()):
+ if f not in pctx and s not in pctx:
+ self.dirstate.copy(None, f)
def filectx(self, path, changeid=None, fileid=None):
"""changeid can be a changeset revision, node, or tag.
@@ -884,15 +870,6 @@
def pathto(self, f, cwd=None):
return self.dirstate.pathto(f, cwd)
- def wfile(self, f, mode='r'):
- self.ui.deprecwarn("use 'repo.wvfs' instead of 'repo.wfile'", '4.2')
- return self.wvfs(f, mode)
-
- def _link(self, f):
- self.ui.deprecwarn("use 'repo.wvfs.islink' instead of 'repo._link'",
- '4.2')
- return self.wvfs.islink(f)
-
def _loadfilter(self, filter):
if filter not in self.filterpats:
l = []
@@ -1100,6 +1077,7 @@
self.store.createmode,
validator=validate,
releasefn=releasefn)
+ tr.changes['revs'] = set()
tr.hookargs['txnid'] = txnid
# note: writing the fncache only during finalize mean that the file is
@@ -1120,6 +1098,7 @@
**pycompat.strkwargs(hookargs))
reporef()._afterlock(hook)
tr.addfinalize('txnclose-hook', txnclosehook)
+ tr.addpostclose('warms-cache', self._buildcacheupdater(tr))
def txnaborthook(tr2):
"""To be run if transaction is aborted
"""
@@ -1143,6 +1122,7 @@
def undofiles(self):
return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
+ @unfilteredmethod
def _writejournal(self, desc):
self.dirstate.savebackup(None, prefix='journal.')
self.vfs.write("journal.branch",
@@ -1194,11 +1174,11 @@
oldtip = oldlen - 1
if detail and ui.verbose:
- msg = (_('repository tip rolled back to revision %s'
+ msg = (_('repository tip rolled back to revision %d'
' (undo %s: %s)\n')
% (oldtip, desc, detail))
else:
- msg = (_('repository tip rolled back to revision %s'
+ msg = (_('repository tip rolled back to revision %d'
' (undo %s)\n')
% (oldtip, desc))
except IOError:
@@ -1254,6 +1234,38 @@
self.destroyed()
return 0
+ def _buildcacheupdater(self, newtransaction):
+ """called during transaction to build the callback updating cache
+
+ Lives on the repository to help extensions that might want to augment
+ this logic. For this purpose, the created transaction is passed to the
+ method.
+ """
+ # we must avoid cyclic reference between repo and transaction.
+ reporef = weakref.ref(self)
+ def updater(tr):
+ repo = reporef()
+ repo.updatecaches(tr)
+ return updater
+
+ @unfilteredmethod
+ def updatecaches(self, tr=None):
+ """warm appropriate caches
+
+ If this function is called after a transaction closed, the transaction
+ will be available in the 'tr' argument. This can be used to selectively
+ update caches relevant to the changes in that transaction.
+ """
+ if tr is not None and tr.hookargs.get('source') == 'strip':
+ # During strip, many caches are invalid but
+ # later call to `destroyed` will refresh them.
+ return
+
+ if tr is None or tr.changes['revs']:
+ # updating the unfiltered branchmap should refresh all the others,
+ self.ui.debug('updating the branch cache\n')
+ branchmap.updatecache(self.filtered('served'))
+
def invalidatecaches(self):
if '_tagscache' in vars(self):
@@ -1582,7 +1594,7 @@
wctx = self[None]
merge = len(wctx.parents()) > 1
- if not force and merge and match.ispartial():
+ if not force and merge and not match.always():
raise error.Abort(_('cannot partially commit a merge '
'(do not specify files or patterns)'))
@@ -1800,7 +1812,6 @@
# if minimal phase was 0 we don't need to retract anything
phases.retractboundary(self, tr, targetphase, [n])
tr.close()
- branchmap.updatecache(self.filtered('served'))
return n
finally:
if tr:
@@ -1842,10 +1853,8 @@
self._phasecache.filterunknown(self)
self._phasecache.write()
- # update the 'served' branch cache to help read only server process
- # Thanks to branchcache collaboration this is done from the nearest
- # filtered subset and it is expected to be fast.
- branchmap.updatecache(self.filtered('served'))
+ # refresh all repository caches
+ self.updatecaches()
# Ensure the persistent tag cache is updated. Doing it now
# means that the tag cache only has to worry about destroyed
@@ -1865,6 +1874,7 @@
changeset, finding all files matched by the match
function
'''
+ self.ui.deprecwarn('use repo[node].walk instead of repo.walk', '4.3')
return self[node].walk(match)
def status(self, node1='.', node2=None, match=None,
@@ -1874,6 +1884,36 @@
return self[node1].status(node2, match, ignored, clean, unknown,
listsubrepos)
+ def addpostdsstatus(self, ps):
+ """Add a callback to run within the wlock, at the point at which status
+ fixups happen.
+
+ On status completion, callback(wctx, status) will be called with the
+ wlock held, unless the dirstate has changed from underneath or the wlock
+ couldn't be grabbed.
+
+ Callbacks should not capture and use a cached copy of the dirstate --
+ it might change in the meanwhile. Instead, they should access the
+ dirstate via wctx.repo().dirstate.
+
+ This list is emptied out after each status run -- extensions should
+ make sure it adds to this list each time dirstate.status is called.
+ Extensions should also make sure they don't call this for statuses
+ that don't involve the dirstate.
+ """
+
+ # The list is located here for uniqueness reasons -- it is actually
+ # managed by the workingctx, but that isn't unique per-repo.
+ self._postdsstatus.append(ps)
+
+ def postdsstatus(self):
+ """Used by workingctx to get the list of post-dirstate-status hooks."""
+ return self._postdsstatus
+
+ def clearpostdsstatus(self):
+ """Used by workingctx to clear post-dirstate-status hooks."""
+ del self._postdsstatus[:]
+
def heads(self, start=None):
if start is None:
cl = self.changelog
@@ -2028,7 +2068,7 @@
new repositories.
"""
ui = repo.ui
- requirements = set(['revlogv1'])
+ requirements = {'revlogv1'}
if ui.configbool('format', 'usestore', True):
requirements.add('store')
if ui.configbool('format', 'usefncache', True):
@@ -2055,4 +2095,11 @@
if ui.configbool('experimental', 'manifestv2', False):
requirements.add('manifestv2')
+ revlogv2 = ui.config('experimental', 'revlogv2')
+ if revlogv2 == 'enable-unstable-format-and-corrupt-my-data':
+ requirements.remove('revlogv1')
+ # generaldelta is implied by revlogv2.
+ requirements.discard('generaldelta')
+ requirements.add(REVLOGV2_REQUIREMENT)
+
return requirements
--- a/mercurial/mail.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/mail.py Tue Jun 20 16:33:46 2017 -0400
@@ -24,26 +24,6 @@
util,
)
-_oldheaderinit = email.header.Header.__init__
-def _unifiedheaderinit(self, *args, **kw):
- """
- Python 2.7 introduces a backwards incompatible change
- (Python issue1974, r70772) in email.Generator.Generator code:
- pre-2.7 code passed "continuation_ws='\t'" to the Header
- constructor, and 2.7 removed this parameter.
-
- Default argument is continuation_ws=' ', which means that the
- behavior is different in <2.7 and 2.7
-
- We consider the 2.7 behavior to be preferable, but need
- to have an unified behavior for versions 2.4 to 2.7
- """
- # override continuation_ws
- kw['continuation_ws'] = ' '
- _oldheaderinit(self, *args, **kw)
-
-setattr(email.header.Header, '__init__', _unifiedheaderinit)
-
class STARTTLS(smtplib.SMTP):
'''Derived class to verify the peer certificate for STARTTLS.
--- a/mercurial/manifest.c Tue Jun 13 22:24:41 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,939 +0,0 @@
-/*
- * manifest.c - manifest type that does on-demand parsing.
- *
- * Copyright 2015, Google Inc.
- *
- * This software may be used and distributed according to the terms of
- * the GNU General Public License, incorporated herein by reference.
- */
-#include <Python.h>
-
-#include <assert.h>
-#include <string.h>
-#include <stdlib.h>
-
-#include "util.h"
-
-#define DEFAULT_LINES 100000
-
-typedef struct {
- char *start;
- Py_ssize_t len; /* length of line including terminal newline */
- char hash_suffix;
- bool from_malloc;
- bool deleted;
-} line;
-
-typedef struct {
- PyObject_HEAD
- PyObject *pydata;
- line *lines;
- int numlines; /* number of line entries */
- int livelines; /* number of non-deleted lines */
- int maxlines; /* allocated number of lines */
- bool dirty;
-} lazymanifest;
-
-#define MANIFEST_OOM -1
-#define MANIFEST_NOT_SORTED -2
-#define MANIFEST_MALFORMED -3
-
-/* defined in parsers.c */
-PyObject *unhexlify(const char *str, int len);
-
-/* get the length of the path for a line */
-static size_t pathlen(line *l) {
- return strlen(l->start);
-}
-
-/* get the node value of a single line */
-static PyObject *nodeof(line *l) {
- char *s = l->start;
- ssize_t llen = pathlen(l);
- PyObject *hash = unhexlify(s + llen + 1, 40);
- if (!hash) {
- return NULL;
- }
- if (l->hash_suffix != '\0') {
- char newhash[21];
- memcpy(newhash, PyBytes_AsString(hash), 20);
- Py_DECREF(hash);
- newhash[20] = l->hash_suffix;
- hash = PyBytes_FromStringAndSize(newhash, 21);
- }
- return hash;
-}
-
-/* get the node hash and flags of a line as a tuple */
-static PyObject *hashflags(line *l)
-{
- char *s = l->start;
- size_t plen = pathlen(l);
- PyObject *hash = nodeof(l);
-
- /* 40 for hash, 1 for null byte, 1 for newline */
- size_t hplen = plen + 42;
- Py_ssize_t flen = l->len - hplen;
- PyObject *flags;
- PyObject *tup;
-
- if (!hash)
- return NULL;
- flags = PyBytes_FromStringAndSize(s + hplen - 1, flen);
- if (!flags) {
- Py_DECREF(hash);
- return NULL;
- }
- tup = PyTuple_Pack(2, hash, flags);
- Py_DECREF(flags);
- Py_DECREF(hash);
- return tup;
-}
-
-/* if we're about to run out of space in the line index, add more */
-static bool realloc_if_full(lazymanifest *self)
-{
- if (self->numlines == self->maxlines) {
- self->maxlines *= 2;
- self->lines = realloc(self->lines, self->maxlines * sizeof(line));
- }
- return !!self->lines;
-}
-
-/*
- * Find the line boundaries in the manifest that 'data' points to and store
- * information about each line in 'self'.
- */
-static int find_lines(lazymanifest *self, char *data, Py_ssize_t len)
-{
- char *prev = NULL;
- while (len > 0) {
- line *l;
- char *next = memchr(data, '\n', len);
- if (!next) {
- return MANIFEST_MALFORMED;
- }
- next++; /* advance past newline */
- if (!realloc_if_full(self)) {
- return MANIFEST_OOM; /* no memory */
- }
- if (prev && strcmp(prev, data) > -1) {
- /* This data isn't sorted, so we have to abort. */
- return MANIFEST_NOT_SORTED;
- }
- l = self->lines + ((self->numlines)++);
- l->start = data;
- l->len = next - data;
- l->hash_suffix = '\0';
- l->from_malloc = false;
- l->deleted = false;
- len = len - l->len;
- prev = data;
- data = next;
- }
- self->livelines = self->numlines;
- return 0;
-}
-
-static int lazymanifest_init(lazymanifest *self, PyObject *args)
-{
- char *data;
- Py_ssize_t len;
- int err, ret;
- PyObject *pydata;
- if (!PyArg_ParseTuple(args, "S", &pydata)) {
- return -1;
- }
- err = PyBytes_AsStringAndSize(pydata, &data, &len);
-
- self->dirty = false;
- if (err == -1)
- return -1;
- self->pydata = pydata;
- Py_INCREF(self->pydata);
- Py_BEGIN_ALLOW_THREADS
- self->lines = malloc(DEFAULT_LINES * sizeof(line));
- self->maxlines = DEFAULT_LINES;
- self->numlines = 0;
- if (!self->lines)
- ret = MANIFEST_OOM;
- else
- ret = find_lines(self, data, len);
- Py_END_ALLOW_THREADS
- switch (ret) {
- case 0:
- break;
- case MANIFEST_OOM:
- PyErr_NoMemory();
- break;
- case MANIFEST_NOT_SORTED:
- PyErr_Format(PyExc_ValueError,
- "Manifest lines not in sorted order.");
- break;
- case MANIFEST_MALFORMED:
- PyErr_Format(PyExc_ValueError,
- "Manifest did not end in a newline.");
- break;
- default:
- PyErr_Format(PyExc_ValueError,
- "Unknown problem parsing manifest.");
- }
- return ret == 0 ? 0 : -1;
-}
-
-static void lazymanifest_dealloc(lazymanifest *self)
-{
- /* free any extra lines we had to allocate */
- int i;
- for (i = 0; i < self->numlines; i++) {
- if (self->lines[i].from_malloc) {
- free(self->lines[i].start);
- }
- }
- if (self->lines) {
- free(self->lines);
- self->lines = NULL;
- }
- if (self->pydata) {
- Py_DECREF(self->pydata);
- self->pydata = NULL;
- }
- PyObject_Del(self);
-}
-
-/* iteration support */
-
-typedef struct {
- PyObject_HEAD lazymanifest *m;
- Py_ssize_t pos;
-} lmIter;
-
-static void lmiter_dealloc(PyObject *o)
-{
- lmIter *self = (lmIter *)o;
- Py_DECREF(self->m);
- PyObject_Del(self);
-}
-
-static line *lmiter_nextline(lmIter *self)
-{
- do {
- self->pos++;
- if (self->pos >= self->m->numlines) {
- return NULL;
- }
- /* skip over deleted manifest entries */
- } while (self->m->lines[self->pos].deleted);
- return self->m->lines + self->pos;
-}
-
-static PyObject *lmiter_iterentriesnext(PyObject *o)
-{
- size_t pl;
- line *l;
- Py_ssize_t consumed;
- PyObject *ret = NULL, *path = NULL, *hash = NULL, *flags = NULL;
- l = lmiter_nextline((lmIter *)o);
- if (!l) {
- goto done;
- }
- pl = pathlen(l);
- path = PyBytes_FromStringAndSize(l->start, pl);
- hash = nodeof(l);
- consumed = pl + 41;
- flags = PyBytes_FromStringAndSize(l->start + consumed,
- l->len - consumed - 1);
- if (!path || !hash || !flags) {
- goto done;
- }
- ret = PyTuple_Pack(3, path, hash, flags);
-done:
- Py_XDECREF(path);
- Py_XDECREF(hash);
- Py_XDECREF(flags);
- return ret;
-}
-
-#ifdef IS_PY3K
-#define LAZYMANIFESTENTRIESITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT
-#else
-#define LAZYMANIFESTENTRIESITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT \
- | Py_TPFLAGS_HAVE_ITER
-#endif
-
-static PyTypeObject lazymanifestEntriesIterator = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "parsers.lazymanifest.entriesiterator", /*tp_name */
- sizeof(lmIter), /*tp_basicsize */
- 0, /*tp_itemsize */
- lmiter_dealloc, /*tp_dealloc */
- 0, /*tp_print */
- 0, /*tp_getattr */
- 0, /*tp_setattr */
- 0, /*tp_compare */
- 0, /*tp_repr */
- 0, /*tp_as_number */
- 0, /*tp_as_sequence */
- 0, /*tp_as_mapping */
- 0, /*tp_hash */
- 0, /*tp_call */
- 0, /*tp_str */
- 0, /*tp_getattro */
- 0, /*tp_setattro */
- 0, /*tp_as_buffer */
- LAZYMANIFESTENTRIESITERATOR_TPFLAGS, /* tp_flags */
- "Iterator for 3-tuples in a lazymanifest.", /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- PyObject_SelfIter, /* tp_iter: __iter__() method */
- lmiter_iterentriesnext, /* tp_iternext: next() method */
-};
-
-static PyObject *lmiter_iterkeysnext(PyObject *o)
-{
- size_t pl;
- line *l = lmiter_nextline((lmIter *)o);
- if (!l) {
- return NULL;
- }
- pl = pathlen(l);
- return PyBytes_FromStringAndSize(l->start, pl);
-}
-
-#ifdef IS_PY3K
-#define LAZYMANIFESTKEYSITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT
-#else
-#define LAZYMANIFESTKEYSITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT \
- | Py_TPFLAGS_HAVE_ITER
-#endif
-
-static PyTypeObject lazymanifestKeysIterator = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "parsers.lazymanifest.keysiterator", /*tp_name */
- sizeof(lmIter), /*tp_basicsize */
- 0, /*tp_itemsize */
- lmiter_dealloc, /*tp_dealloc */
- 0, /*tp_print */
- 0, /*tp_getattr */
- 0, /*tp_setattr */
- 0, /*tp_compare */
- 0, /*tp_repr */
- 0, /*tp_as_number */
- 0, /*tp_as_sequence */
- 0, /*tp_as_mapping */
- 0, /*tp_hash */
- 0, /*tp_call */
- 0, /*tp_str */
- 0, /*tp_getattro */
- 0, /*tp_setattro */
- 0, /*tp_as_buffer */
- LAZYMANIFESTKEYSITERATOR_TPFLAGS, /* tp_flags */
- "Keys iterator for a lazymanifest.", /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- PyObject_SelfIter, /* tp_iter: __iter__() method */
- lmiter_iterkeysnext, /* tp_iternext: next() method */
-};
-
-static lazymanifest *lazymanifest_copy(lazymanifest *self);
-
-static PyObject *lazymanifest_getentriesiter(lazymanifest *self)
-{
- lmIter *i = NULL;
- lazymanifest *t = lazymanifest_copy(self);
- if (!t) {
- PyErr_NoMemory();
- return NULL;
- }
- i = PyObject_New(lmIter, &lazymanifestEntriesIterator);
- if (i) {
- i->m = t;
- i->pos = -1;
- } else {
- Py_DECREF(t);
- PyErr_NoMemory();
- }
- return (PyObject *)i;
-}
-
-static PyObject *lazymanifest_getkeysiter(lazymanifest *self)
-{
- lmIter *i = NULL;
- lazymanifest *t = lazymanifest_copy(self);
- if (!t) {
- PyErr_NoMemory();
- return NULL;
- }
- i = PyObject_New(lmIter, &lazymanifestKeysIterator);
- if (i) {
- i->m = t;
- i->pos = -1;
- } else {
- Py_DECREF(t);
- PyErr_NoMemory();
- }
- return (PyObject *)i;
-}
-
-/* __getitem__ and __setitem__ support */
-
-static Py_ssize_t lazymanifest_size(lazymanifest *self)
-{
- return self->livelines;
-}
-
-static int linecmp(const void *left, const void *right)
-{
- return strcmp(((const line *)left)->start,
- ((const line *)right)->start);
-}
-
-static PyObject *lazymanifest_getitem(lazymanifest *self, PyObject *key)
-{
- line needle;
- line *hit;
- if (!PyBytes_Check(key)) {
- PyErr_Format(PyExc_TypeError,
- "getitem: manifest keys must be a string.");
- return NULL;
- }
- needle.start = PyBytes_AsString(key);
- hit = bsearch(&needle, self->lines, self->numlines, sizeof(line),
- &linecmp);
- if (!hit || hit->deleted) {
- PyErr_Format(PyExc_KeyError, "No such manifest entry.");
- return NULL;
- }
- return hashflags(hit);
-}
-
-static int lazymanifest_delitem(lazymanifest *self, PyObject *key)
-{
- line needle;
- line *hit;
- if (!PyBytes_Check(key)) {
- PyErr_Format(PyExc_TypeError,
- "delitem: manifest keys must be a string.");
- return -1;
- }
- needle.start = PyBytes_AsString(key);
- hit = bsearch(&needle, self->lines, self->numlines, sizeof(line),
- &linecmp);
- if (!hit || hit->deleted) {
- PyErr_Format(PyExc_KeyError,
- "Tried to delete nonexistent manifest entry.");
- return -1;
- }
- self->dirty = true;
- hit->deleted = true;
- self->livelines--;
- return 0;
-}
-
-/* Do a binary search for the insertion point for new, creating the
- * new entry if needed. */
-static int internalsetitem(lazymanifest *self, line *new) {
- int start = 0, end = self->numlines;
- while (start < end) {
- int pos = start + (end - start) / 2;
- int c = linecmp(new, self->lines + pos);
- if (c < 0)
- end = pos;
- else if (c > 0)
- start = pos + 1;
- else {
- if (self->lines[pos].deleted)
- self->livelines++;
- if (self->lines[pos].from_malloc)
- free(self->lines[pos].start);
- start = pos;
- goto finish;
- }
- }
- /* being here means we need to do an insert */
- if (!realloc_if_full(self)) {
- PyErr_NoMemory();
- return -1;
- }
- memmove(self->lines + start + 1, self->lines + start,
- (self->numlines - start) * sizeof(line));
- self->numlines++;
- self->livelines++;
-finish:
- self->lines[start] = *new;
- self->dirty = true;
- return 0;
-}
-
-static int lazymanifest_setitem(
- lazymanifest *self, PyObject *key, PyObject *value)
-{
- char *path;
- Py_ssize_t plen;
- PyObject *pyhash;
- Py_ssize_t hlen;
- char *hash;
- PyObject *pyflags;
- char *flags;
- Py_ssize_t flen;
- size_t dlen;
- char *dest;
- int i;
- line new;
- if (!PyBytes_Check(key)) {
- PyErr_Format(PyExc_TypeError,
- "setitem: manifest keys must be a string.");
- return -1;
- }
- if (!value) {
- return lazymanifest_delitem(self, key);
- }
- if (!PyTuple_Check(value) || PyTuple_Size(value) != 2) {
- PyErr_Format(PyExc_TypeError,
- "Manifest values must be a tuple of (node, flags).");
- return -1;
- }
- if (PyBytes_AsStringAndSize(key, &path, &plen) == -1) {
- return -1;
- }
-
- pyhash = PyTuple_GetItem(value, 0);
- if (!PyBytes_Check(pyhash)) {
- PyErr_Format(PyExc_TypeError,
- "node must be a 20-byte string");
- return -1;
- }
- hlen = PyBytes_Size(pyhash);
- /* Some parts of the codebase try and set 21 or 22
- * byte "hash" values in order to perturb things for
- * status. We have to preserve at least the 21st
- * byte. Sigh. If there's a 22nd byte, we drop it on
- * the floor, which works fine.
- */
- if (hlen != 20 && hlen != 21 && hlen != 22) {
- PyErr_Format(PyExc_TypeError,
- "node must be a 20-byte string");
- return -1;
- }
- hash = PyBytes_AsString(pyhash);
-
- pyflags = PyTuple_GetItem(value, 1);
- if (!PyBytes_Check(pyflags) || PyBytes_Size(pyflags) > 1) {
- PyErr_Format(PyExc_TypeError,
- "flags must a 0 or 1 byte string");
- return -1;
- }
- if (PyBytes_AsStringAndSize(pyflags, &flags, &flen) == -1) {
- return -1;
- }
- /* one null byte and one newline */
- dlen = plen + 41 + flen + 1;
- dest = malloc(dlen);
- if (!dest) {
- PyErr_NoMemory();
- return -1;
- }
- memcpy(dest, path, plen + 1);
- for (i = 0; i < 20; i++) {
- /* Cast to unsigned, so it will not get sign-extended when promoted
- * to int (as is done when passing to a variadic function)
- */
- sprintf(dest + plen + 1 + (i * 2), "%02x", (unsigned char)hash[i]);
- }
- memcpy(dest + plen + 41, flags, flen);
- dest[plen + 41 + flen] = '\n';
- new.start = dest;
- new.len = dlen;
- new.hash_suffix = '\0';
- if (hlen > 20) {
- new.hash_suffix = hash[20];
- }
- new.from_malloc = true; /* is `start` a pointer we allocated? */
- new.deleted = false; /* is this entry deleted? */
- if (internalsetitem(self, &new)) {
- return -1;
- }
- return 0;
-}
-
-static PyMappingMethods lazymanifest_mapping_methods = {
- (lenfunc)lazymanifest_size, /* mp_length */
- (binaryfunc)lazymanifest_getitem, /* mp_subscript */
- (objobjargproc)lazymanifest_setitem, /* mp_ass_subscript */
-};
-
-/* sequence methods (important or __contains__ builds an iterator) */
-
-static int lazymanifest_contains(lazymanifest *self, PyObject *key)
-{
- line needle;
- line *hit;
- if (!PyBytes_Check(key)) {
- /* Our keys are always strings, so if the contains
- * check is for a non-string, just return false. */
- return 0;
- }
- needle.start = PyBytes_AsString(key);
- hit = bsearch(&needle, self->lines, self->numlines, sizeof(line),
- &linecmp);
- if (!hit || hit->deleted) {
- return 0;
- }
- return 1;
-}
-
-static PySequenceMethods lazymanifest_seq_meths = {
- (lenfunc)lazymanifest_size, /* sq_length */
- 0, /* sq_concat */
- 0, /* sq_repeat */
- 0, /* sq_item */
- 0, /* sq_slice */
- 0, /* sq_ass_item */
- 0, /* sq_ass_slice */
- (objobjproc)lazymanifest_contains, /* sq_contains */
- 0, /* sq_inplace_concat */
- 0, /* sq_inplace_repeat */
-};
-
-
-/* Other methods (copy, diff, etc) */
-static PyTypeObject lazymanifestType;
-
-/* If the manifest has changes, build the new manifest text and reindex it. */
-static int compact(lazymanifest *self) {
- int i;
- ssize_t need = 0;
- char *data;
- line *src, *dst;
- PyObject *pydata;
- if (!self->dirty)
- return 0;
- for (i = 0; i < self->numlines; i++) {
- if (!self->lines[i].deleted) {
- need += self->lines[i].len;
- }
- }
- pydata = PyBytes_FromStringAndSize(NULL, need);
- if (!pydata)
- return -1;
- data = PyBytes_AsString(pydata);
- if (!data) {
- return -1;
- }
- src = self->lines;
- dst = self->lines;
- for (i = 0; i < self->numlines; i++, src++) {
- char *tofree = NULL;
- if (src->from_malloc) {
- tofree = src->start;
- }
- if (!src->deleted) {
- memcpy(data, src->start, src->len);
- *dst = *src;
- dst->start = data;
- dst->from_malloc = false;
- data += dst->len;
- dst++;
- }
- free(tofree);
- }
- Py_DECREF(self->pydata);
- self->pydata = pydata;
- self->numlines = self->livelines;
- self->dirty = false;
- return 0;
-}
-
-static PyObject *lazymanifest_text(lazymanifest *self)
-{
- if (compact(self) != 0) {
- PyErr_NoMemory();
- return NULL;
- }
- Py_INCREF(self->pydata);
- return self->pydata;
-}
-
-static lazymanifest *lazymanifest_copy(lazymanifest *self)
-{
- lazymanifest *copy = NULL;
- if (compact(self) != 0) {
- goto nomem;
- }
- copy = PyObject_New(lazymanifest, &lazymanifestType);
- if (!copy) {
- goto nomem;
- }
- copy->numlines = self->numlines;
- copy->livelines = self->livelines;
- copy->dirty = false;
- copy->lines = malloc(self->maxlines *sizeof(line));
- if (!copy->lines) {
- goto nomem;
- }
- memcpy(copy->lines, self->lines, self->numlines * sizeof(line));
- copy->maxlines = self->maxlines;
- copy->pydata = self->pydata;
- Py_INCREF(copy->pydata);
- return copy;
-nomem:
- PyErr_NoMemory();
- Py_XDECREF(copy);
- return NULL;
-}
-
-static lazymanifest *lazymanifest_filtercopy(
- lazymanifest *self, PyObject *matchfn)
-{
- lazymanifest *copy = NULL;
- int i;
- if (!PyCallable_Check(matchfn)) {
- PyErr_SetString(PyExc_TypeError, "matchfn must be callable");
- return NULL;
- }
- /* compact ourselves first to avoid double-frees later when we
- * compact tmp so that it doesn't have random pointers to our
- * underlying from_malloc-data (self->pydata is safe) */
- if (compact(self) != 0) {
- goto nomem;
- }
- copy = PyObject_New(lazymanifest, &lazymanifestType);
- if (!copy) {
- goto nomem;
- }
- copy->dirty = true;
- copy->lines = malloc(self->maxlines * sizeof(line));
- if (!copy->lines) {
- goto nomem;
- }
- copy->maxlines = self->maxlines;
- copy->numlines = 0;
- copy->pydata = self->pydata;
- Py_INCREF(self->pydata);
- for (i = 0; i < self->numlines; i++) {
- PyObject *arglist = NULL, *result = NULL;
- arglist = Py_BuildValue("(s)", self->lines[i].start);
- if (!arglist) {
- return NULL;
- }
- result = PyObject_CallObject(matchfn, arglist);
- Py_DECREF(arglist);
- /* if the callback raised an exception, just let it
- * through and give up */
- if (!result) {
- free(copy->lines);
- Py_DECREF(self->pydata);
- return NULL;
- }
- if (PyObject_IsTrue(result)) {
- assert(!(self->lines[i].from_malloc));
- copy->lines[copy->numlines++] = self->lines[i];
- }
- Py_DECREF(result);
- }
- copy->livelines = copy->numlines;
- return copy;
-nomem:
- PyErr_NoMemory();
- Py_XDECREF(copy);
- return NULL;
-}
-
-static PyObject *lazymanifest_diff(lazymanifest *self, PyObject *args)
-{
- lazymanifest *other;
- PyObject *pyclean = NULL;
- bool listclean;
- PyObject *emptyTup = NULL, *ret = NULL;
- PyObject *es;
- int sneedle = 0, oneedle = 0;
- if (!PyArg_ParseTuple(args, "O!|O", &lazymanifestType, &other, &pyclean)) {
- return NULL;
- }
- listclean = (!pyclean) ? false : PyObject_IsTrue(pyclean);
- es = PyBytes_FromString("");
- if (!es) {
- goto nomem;
- }
- emptyTup = PyTuple_Pack(2, Py_None, es);
- Py_DECREF(es);
- if (!emptyTup) {
- goto nomem;
- }
- ret = PyDict_New();
- if (!ret) {
- goto nomem;
- }
- while (sneedle != self->numlines || oneedle != other->numlines) {
- line *left = self->lines + sneedle;
- line *right = other->lines + oneedle;
- int result;
- PyObject *key;
- PyObject *outer;
- /* If we're looking at a deleted entry and it's not
- * the end of the manifest, just skip it. */
- if (left->deleted && sneedle < self->numlines) {
- sneedle++;
- continue;
- }
- if (right->deleted && oneedle < other->numlines) {
- oneedle++;
- continue;
- }
- /* if we're at the end of either manifest, then we
- * know the remaining items are adds so we can skip
- * the strcmp. */
- if (sneedle == self->numlines) {
- result = 1;
- } else if (oneedle == other->numlines) {
- result = -1;
- } else {
- result = linecmp(left, right);
- }
- key = result <= 0 ?
- PyBytes_FromString(left->start) :
- PyBytes_FromString(right->start);
- if (!key)
- goto nomem;
- if (result < 0) {
- PyObject *l = hashflags(left);
- if (!l) {
- goto nomem;
- }
- outer = PyTuple_Pack(2, l, emptyTup);
- Py_DECREF(l);
- if (!outer) {
- goto nomem;
- }
- PyDict_SetItem(ret, key, outer);
- Py_DECREF(outer);
- sneedle++;
- } else if (result > 0) {
- PyObject *r = hashflags(right);
- if (!r) {
- goto nomem;
- }
- outer = PyTuple_Pack(2, emptyTup, r);
- Py_DECREF(r);
- if (!outer) {
- goto nomem;
- }
- PyDict_SetItem(ret, key, outer);
- Py_DECREF(outer);
- oneedle++;
- } else {
- /* file exists in both manifests */
- if (left->len != right->len
- || memcmp(left->start, right->start, left->len)
- || left->hash_suffix != right->hash_suffix) {
- PyObject *l = hashflags(left);
- PyObject *r;
- if (!l) {
- goto nomem;
- }
- r = hashflags(right);
- if (!r) {
- Py_DECREF(l);
- goto nomem;
- }
- outer = PyTuple_Pack(2, l, r);
- Py_DECREF(l);
- Py_DECREF(r);
- if (!outer) {
- goto nomem;
- }
- PyDict_SetItem(ret, key, outer);
- Py_DECREF(outer);
- } else if (listclean) {
- PyDict_SetItem(ret, key, Py_None);
- }
- sneedle++;
- oneedle++;
- }
- Py_DECREF(key);
- }
- Py_DECREF(emptyTup);
- return ret;
-nomem:
- PyErr_NoMemory();
- Py_XDECREF(ret);
- Py_XDECREF(emptyTup);
- return NULL;
-}
-
-static PyMethodDef lazymanifest_methods[] = {
- {"iterkeys", (PyCFunction)lazymanifest_getkeysiter, METH_NOARGS,
- "Iterate over file names in this lazymanifest."},
- {"iterentries", (PyCFunction)lazymanifest_getentriesiter, METH_NOARGS,
- "Iterate over (path, nodeid, flags) tuples in this lazymanifest."},
- {"copy", (PyCFunction)lazymanifest_copy, METH_NOARGS,
- "Make a copy of this lazymanifest."},
- {"filtercopy", (PyCFunction)lazymanifest_filtercopy, METH_O,
- "Make a copy of this manifest filtered by matchfn."},
- {"diff", (PyCFunction)lazymanifest_diff, METH_VARARGS,
- "Compare this lazymanifest to another one."},
- {"text", (PyCFunction)lazymanifest_text, METH_NOARGS,
- "Encode this manifest to text."},
- {NULL},
-};
-
-#ifdef IS_PY3K
-#define LAZYMANIFEST_TPFLAGS Py_TPFLAGS_DEFAULT
-#else
-#define LAZYMANIFEST_TPFLAGS Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_SEQUENCE_IN
-#endif
-
-static PyTypeObject lazymanifestType = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "parsers.lazymanifest", /* tp_name */
- sizeof(lazymanifest), /* tp_basicsize */
- 0, /* tp_itemsize */
- (destructor)lazymanifest_dealloc, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_compare */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- &lazymanifest_seq_meths, /* tp_as_sequence */
- &lazymanifest_mapping_methods, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- LAZYMANIFEST_TPFLAGS, /* tp_flags */
- "TODO(augie)", /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- (getiterfunc)lazymanifest_getkeysiter, /* tp_iter */
- 0, /* tp_iternext */
- lazymanifest_methods, /* tp_methods */
- 0, /* tp_members */
- 0, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- (initproc)lazymanifest_init, /* tp_init */
- 0, /* tp_alloc */
-};
-
-void manifest_module_init(PyObject * mod)
-{
- lazymanifestType.tp_new = PyType_GenericNew;
- if (PyType_Ready(&lazymanifestType) < 0)
- return;
- Py_INCREF(&lazymanifestType);
-
- PyModule_AddObject(mod, "lazymanifest",
- (PyObject *)&lazymanifestType);
-}
--- a/mercurial/manifest.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/manifest.py Tue Jun 20 16:33:46 2017 -0400
@@ -8,6 +8,7 @@
from __future__ import absolute_import
import heapq
+import itertools
import os
import struct
@@ -19,11 +20,12 @@
from . import (
error,
mdiff,
- parsers,
+ policy,
revlog,
util,
)
+parsers = policy.importmod(r'parsers')
propertycache = util.propertycache
def _parsev1(data):
@@ -32,7 +34,7 @@
# class exactly matches its C counterpart to try and help
# prevent surprise breakage for anyone that develops against
# the pure version.
- if data and data[-1] != '\n':
+ if data and data[-1:] != '\n':
raise ValueError('Manifest did not end in a newline.')
prev = None
for l in data.splitlines():
@@ -54,7 +56,7 @@
end = data.find('\n', pos + 1) # +1 to skip stem length byte
if end == -1:
raise ValueError('Manifest ended with incomplete file entry.')
- stemlen = ord(data[pos])
+ stemlen = ord(data[pos:pos + 1])
items = data[pos + 1:end].split('\0')
f = prevf[:stemlen] + items[0]
if prevf > f:
@@ -577,9 +579,11 @@
c._lm = self._lm.copy()
return c
- def iteritems(self):
+ def items(self):
return (x[:2] for x in self._lm.iterentries())
+ iteritems = items
+
def iterentries(self):
return self._lm.iterentries()
@@ -778,25 +782,29 @@
def iterentries(self):
self._load()
- for p, n in sorted(self._dirs.items() + self._files.items()):
+ for p, n in sorted(itertools.chain(self._dirs.items(),
+ self._files.items())):
if p in self._files:
yield self._subpath(p), n, self._flags.get(p, '')
else:
for x in n.iterentries():
yield x
- def iteritems(self):
+ def items(self):
self._load()
- for p, n in sorted(self._dirs.items() + self._files.items()):
+ for p, n in sorted(itertools.chain(self._dirs.items(),
+ self._files.items())):
if p in self._files:
yield self._subpath(p), n
else:
for f, sn in n.iteritems():
yield f, sn
+ iteritems = items
+
def iterkeys(self):
self._load()
- for p in sorted(self._dirs.keys() + self._files.keys()):
+ for p in sorted(itertools.chain(self._dirs, self._files)):
if p in self._files:
yield self._subpath(p)
else:
@@ -1175,25 +1183,31 @@
'''A revlog that stores manifest texts. This is responsible for caching the
full-text manifest contents.
'''
- def __init__(self, opener, dir='', dirlogcache=None, indexfile=None):
+ def __init__(self, opener, dir='', dirlogcache=None, indexfile=None,
+ treemanifest=False):
"""Constructs a new manifest revlog
`indexfile` - used by extensions to have two manifests at once, like
when transitioning between flatmanifeset and treemanifests.
+
+ `treemanifest` - used to indicate this is a tree manifest revlog. Opener
+ options can also be used to make this a tree manifest revlog. The opener
+ option takes precedence, so if it is set to True, we ignore whatever
+ value is passed in to the constructor.
"""
# During normal operations, we expect to deal with not more than four
# revs at a time (such as during commit --amend). When rebasing large
# stacks of commits, the number can go up, hence the config knob below.
cachesize = 4
- usetreemanifest = False
+ optiontreemanifest = False
usemanifestv2 = False
opts = getattr(opener, 'options', None)
if opts is not None:
cachesize = opts.get('manifestcachesize', cachesize)
- usetreemanifest = opts.get('treemanifest', usetreemanifest)
+ optiontreemanifest = opts.get('treemanifest', False)
usemanifestv2 = opts.get('manifestv2', usemanifestv2)
- self._treeondisk = usetreemanifest
+ self._treeondisk = optiontreemanifest or treemanifest
self._usemanifestv2 = usemanifestv2
self._fulltextcache = util.lrucachedict(cachesize)
@@ -1231,8 +1245,10 @@
if dir:
assert self._treeondisk
if dir not in self._dirlogcache:
- self._dirlogcache[dir] = manifestrevlog(self.opener, dir,
- self._dirlogcache)
+ mfrevlog = manifestrevlog(self.opener, dir,
+ self._dirlogcache,
+ treemanifest=self._treeondisk)
+ self._dirlogcache[dir] = mfrevlog
return self._dirlogcache[dir]
def add(self, m, transaction, link, p1, p2, added, removed, readtree=None):
@@ -1317,8 +1333,7 @@
cachesize = opts.get('manifestcachesize', cachesize)
self._treeinmem = usetreemanifest
- self._oldmanifest = repo._constructmanifest()
- self._revlog = self._oldmanifest
+ self._revlog = repo._constructmanifest()
# A cache of the manifestctx or treemanifestctx for each directory
self._dirmancache = {}
@@ -1340,12 +1355,7 @@
the revlog
"""
if node in self._dirmancache.get(dir, ()):
- cachemf = self._dirmancache[dir][node]
- # The old manifest may put non-ctx manifests in the cache, so
- # skip those since they don't implement the full api.
- if (isinstance(cachemf, manifestctx) or
- isinstance(cachemf, treemanifestctx)):
- return cachemf
+ return self._dirmancache[dir][node]
if dir:
if self._revlog._treeondisk:
--- a/mercurial/match.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/match.py Tue Jun 20 16:33:46 2017 -0400
@@ -38,7 +38,8 @@
for kind, pat, source in kindpats:
if kind == 'set':
if not ctx:
- raise error.Abort(_("fileset expression with no context"))
+ raise error.ProgrammingError("fileset expression with no "
+ "context")
s = ctx.getfileset(pat)
fset.update(s)
@@ -52,7 +53,7 @@
return fset, other
def _expandsubinclude(kindpats, root):
- '''Returns the list of subinclude matchers and the kindpats without the
+ '''Returns the list of subinclude matcher args and the kindpats without the
subincludes in it.'''
relmatchers = []
other = []
@@ -64,12 +65,12 @@
path = pathutil.join(sourceroot, pat)
newroot = pathutil.dirname(path)
- relmatcher = match(newroot, '', [], ['include:%s' % path])
+ matcherargs = (newroot, '', [], ['include:%s' % path])
prefix = pathutil.canonpath(root, root, newroot)
if prefix:
prefix += '/'
- relmatchers.append((prefix, relmatcher))
+ relmatchers.append((prefix, matcherargs))
else:
other.append((kind, pat, source))
@@ -84,118 +85,161 @@
return False
return True
-class match(object):
- def __init__(self, root, cwd, patterns, include=None, exclude=None,
- default='glob', exact=False, auditor=None, ctx=None,
- listsubrepos=False, warn=None, badfn=None):
- """build an object to match a set of file patterns
+def match(root, cwd, patterns=None, include=None, exclude=None, default='glob',
+ exact=False, auditor=None, ctx=None, listsubrepos=False, warn=None,
+ badfn=None, icasefs=False):
+ """build an object to match a set of file patterns
+
+ arguments:
+ root - the canonical root of the tree you're matching against
+ cwd - the current working directory, if relevant
+ patterns - patterns to find
+ include - patterns to include (unless they are excluded)
+ exclude - patterns to exclude (even if they are included)
+ default - if a pattern in patterns has no explicit type, assume this one
+ exact - patterns are actually filenames (include/exclude still apply)
+ warn - optional function used for printing warnings
+ badfn - optional bad() callback for this matcher instead of the default
+ icasefs - make a matcher for wdir on case insensitive filesystems, which
+ normalizes the given patterns to the case in the filesystem
- arguments:
- root - the canonical root of the tree you're matching against
- cwd - the current working directory, if relevant
- patterns - patterns to find
- include - patterns to include (unless they are excluded)
- exclude - patterns to exclude (even if they are included)
- default - if a pattern in patterns has no explicit type, assume this one
- exact - patterns are actually filenames (include/exclude still apply)
- warn - optional function used for printing warnings
- badfn - optional bad() callback for this matcher instead of the default
+ a pattern is one of:
+ 'glob:<glob>' - a glob relative to cwd
+ 're:<regexp>' - a regular expression
+ 'path:<path>' - a path relative to repository root, which is matched
+ recursively
+ 'rootfilesin:<path>' - a path relative to repository root, which is
+ matched non-recursively (will not match subdirectories)
+ 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
+ 'relpath:<path>' - a path relative to cwd
+ 'relre:<regexp>' - a regexp that needn't match the start of a name
+ 'set:<fileset>' - a fileset expression
+ 'include:<path>' - a file of patterns to read and include
+ 'subinclude:<path>' - a file of patterns to match against files under
+ the same directory
+ '<something>' - a pattern of the specified default type
+ """
+ normalize = _donormalize
+ if icasefs:
+ if exact:
+ raise error.ProgrammingError("a case-insensitive exact matcher "
+ "doesn't make sense")
+ dirstate = ctx.repo().dirstate
+ dsnormalize = dirstate.normalize
+
+ def normalize(patterns, default, root, cwd, auditor, warn):
+ kp = _donormalize(patterns, default, root, cwd, auditor, warn)
+ kindpats = []
+ for kind, pats, source in kp:
+ if kind not in ('re', 'relre'): # regex can't be normalized
+ p = pats
+ pats = dsnormalize(pats)
+
+ # Preserve the original to handle a case only rename.
+ if p != pats and p in dirstate:
+ kindpats.append((kind, p, source))
+
+ kindpats.append((kind, pats, source))
+ return kindpats
+
+ if exact:
+ m = exactmatcher(root, cwd, patterns, badfn)
+ elif patterns:
+ kindpats = normalize(patterns, default, root, cwd, auditor, warn)
+ if _kindpatsalwaysmatch(kindpats):
+ m = alwaysmatcher(root, cwd, badfn, relativeuipath=True)
+ else:
+ m = patternmatcher(root, cwd, kindpats, ctx=ctx,
+ listsubrepos=listsubrepos, badfn=badfn)
+ else:
+ # It's a little strange that no patterns means to match everything.
+ # Consider changing this to match nothing (probably using nevermatcher).
+ m = alwaysmatcher(root, cwd, badfn)
- a pattern is one of:
- 'glob:<glob>' - a glob relative to cwd
- 're:<regexp>' - a regular expression
- 'path:<path>' - a path relative to repository root, which is matched
- recursively
- 'rootfilesin:<path>' - a path relative to repository root, which is
- matched non-recursively (will not match subdirectories)
- 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
- 'relpath:<path>' - a path relative to cwd
- 'relre:<regexp>' - a regexp that needn't match the start of a name
- 'set:<fileset>' - a fileset expression
- 'include:<path>' - a file of patterns to read and include
- 'subinclude:<path>' - a file of patterns to match against files under
- the same directory
- '<something>' - a pattern of the specified default type
- """
- if include is None:
- include = []
- if exclude is None:
- exclude = []
+ if include:
+ kindpats = normalize(include, 'glob', root, cwd, auditor, warn)
+ im = includematcher(root, cwd, kindpats, ctx=ctx,
+ listsubrepos=listsubrepos, badfn=None)
+ m = intersectmatchers(m, im)
+ if exclude:
+ kindpats = normalize(exclude, 'glob', root, cwd, auditor, warn)
+ em = includematcher(root, cwd, kindpats, ctx=ctx,
+ listsubrepos=listsubrepos, badfn=None)
+ m = differencematcher(m, em)
+ return m
+
+def exact(root, cwd, files, badfn=None):
+ return exactmatcher(root, cwd, files, badfn=badfn)
+
+def always(root, cwd):
+ return alwaysmatcher(root, cwd)
+
+def never(root, cwd):
+ return nevermatcher(root, cwd)
+
+def badmatch(match, badfn):
+ """Make a copy of the given matcher, replacing its bad method with the given
+ one.
+ """
+ m = copy.copy(match)
+ m.bad = badfn
+ return m
+def _donormalize(patterns, default, root, cwd, auditor, warn):
+ '''Convert 'kind:pat' from the patterns list to tuples with kind and
+ normalized and rooted patterns and with listfiles expanded.'''
+ kindpats = []
+ for kind, pat in [_patsplit(p, default) for p in patterns]:
+ if kind in ('glob', 'relpath'):
+ pat = pathutil.canonpath(root, cwd, pat, auditor)
+ elif kind in ('relglob', 'path', 'rootfilesin'):
+ pat = util.normpath(pat)
+ elif kind in ('listfile', 'listfile0'):
+ try:
+ files = util.readfile(pat)
+ if kind == 'listfile0':
+ files = files.split('\0')
+ else:
+ files = files.splitlines()
+ files = [f for f in files if f]
+ except EnvironmentError:
+ raise error.Abort(_("unable to read file list (%s)") % pat)
+ for k, p, source in _donormalize(files, default, root, cwd,
+ auditor, warn):
+ kindpats.append((k, p, pat))
+ continue
+ elif kind == 'include':
+ try:
+ fullpath = os.path.join(root, util.localpath(pat))
+ includepats = readpatternfile(fullpath, warn)
+ for k, p, source in _donormalize(includepats, default,
+ root, cwd, auditor, warn):
+ kindpats.append((k, p, source or pat))
+ except error.Abort as inst:
+ raise error.Abort('%s: %s' % (pat, inst[0]))
+ except IOError as inst:
+ if warn:
+ warn(_("skipping unreadable pattern file '%s': %s\n") %
+ (pat, inst.strerror))
+ continue
+ # else: re or relre - which cannot be normalized
+ kindpats.append((kind, pat, ''))
+ return kindpats
+
+class basematcher(object):
+
+ def __init__(self, root, cwd, badfn=None, relativeuipath=True):
self._root = root
self._cwd = cwd
- self._files = [] # exact files and roots of patterns
- self._anypats = bool(include or exclude)
- self._always = False
- self._pathrestricted = bool(include or exclude or patterns)
- self._warn = warn
-
- # roots are directories which are recursively included/excluded.
- self._includeroots = set()
- self._excluderoots = set()
- # dirs are directories which are non-recursively included.
- self._includedirs = set(['.'])
-
if badfn is not None:
self.bad = badfn
-
- matchfns = []
- if include:
- kindpats = self._normalize(include, 'glob', root, cwd, auditor)
- self.includepat, im = _buildmatch(ctx, kindpats, '(?:/|$)',
- listsubrepos, root)
- roots, dirs = _rootsanddirs(kindpats)
- self._includeroots.update(roots)
- self._includedirs.update(dirs)
- matchfns.append(im)
- if exclude:
- kindpats = self._normalize(exclude, 'glob', root, cwd, auditor)
- self.excludepat, em = _buildmatch(ctx, kindpats, '(?:/|$)',
- listsubrepos, root)
- if not _anypats(kindpats):
- # Only consider recursive excludes as such - if a non-recursive
- # exclude is used, we must still recurse into the excluded
- # directory, at least to find subdirectories. In such a case,
- # the regex still won't match the non-recursively-excluded
- # files.
- self._excluderoots.update(_roots(kindpats))
- matchfns.append(lambda f: not em(f))
- if exact:
- if isinstance(patterns, list):
- self._files = patterns
- else:
- self._files = list(patterns)
- matchfns.append(self.exact)
- elif patterns:
- kindpats = self._normalize(patterns, default, root, cwd, auditor)
- if not _kindpatsalwaysmatch(kindpats):
- self._files = _explicitfiles(kindpats)
- self._anypats = self._anypats or _anypats(kindpats)
- self.patternspat, pm = _buildmatch(ctx, kindpats, '$',
- listsubrepos, root)
- matchfns.append(pm)
-
- if not matchfns:
- m = util.always
- self._always = True
- elif len(matchfns) == 1:
- m = matchfns[0]
- else:
- def m(f):
- for matchfn in matchfns:
- if not matchfn(f):
- return False
- return True
-
- self.matchfn = m
- self._fileroots = set(self._files)
+ self._relativeuipath = relativeuipath
def __call__(self, fn):
return self.matchfn(fn)
def __iter__(self):
for f in self._files:
yield f
-
# Callbacks related to how the matcher is used by dirstate.walk.
# Subscribers to these events must monkeypatch the matcher object.
def bad(self, f, msg):
@@ -224,7 +268,11 @@
'''Convert repo path to a display path. If patterns or -I/-X were used
to create this matcher, the display path will be relative to cwd.
Otherwise it is relative to the root of the repo.'''
- return (self._pathrestricted and self.rel(f)) or self.abs(f)
+ return (self._relativeuipath and self.rel(f)) or self.abs(f)
+
+ @propertycache
+ def _files(self):
+ return []
def files(self):
'''Explicitly listed files or patterns or roots:
@@ -235,8 +283,15 @@
return self._files
@propertycache
- def _dirs(self):
- return set(util.dirs(self._fileroots)) | set(['.'])
+ def _fileset(self):
+ return set(self._files)
+
+ def exact(self, f):
+ '''Returns True if f is in .files().'''
+ return f in self._fileset
+
+ def matchfn(self, f):
+ return False
def visitdir(self, dir):
'''Decides whether a directory should be visited based on whether it
@@ -250,107 +305,265 @@
This function's behavior is undefined if it has returned False for
one of the dir's parent directories.
'''
- if self.prefix() and dir in self._fileroots:
- return 'all'
- if dir in self._excluderoots:
- return False
- if ((self._includeroots or self._includedirs != set(['.'])) and
- '.' not in self._includeroots and
- dir not in self._includeroots and
- dir not in self._includedirs and
- not any(parent in self._includeroots
- for parent in util.finddirs(dir))):
- return False
- return (not self._fileroots or
- '.' in self._fileroots or
- dir in self._fileroots or
- dir in self._dirs or
- any(parentdir in self._fileroots
- for parentdir in util.finddirs(dir)))
-
- def exact(self, f):
- '''Returns True if f is in .files().'''
- return f in self._fileroots
+ return False
def anypats(self):
'''Matcher uses patterns or include/exclude.'''
- return self._anypats
+ return False
def always(self):
'''Matcher will match everything and .files() will be empty
- optimization might be possible and necessary.'''
- return self._always
-
- def ispartial(self):
- '''True if the matcher won't always match.
-
- Although it's just the inverse of _always in this implementation,
- an extension such as narrowhg might make it return something
- slightly different.'''
- return not self._always
+ return False
def isexact(self):
- return self.matchfn == self.exact
+ return False
def prefix(self):
return not self.always() and not self.isexact() and not self.anypats()
- def _normalize(self, patterns, default, root, cwd, auditor):
- '''Convert 'kind:pat' from the patterns list to tuples with kind and
- normalized and rooted patterns and with listfiles expanded.'''
- kindpats = []
- for kind, pat in [_patsplit(p, default) for p in patterns]:
- if kind in ('glob', 'relpath'):
- pat = pathutil.canonpath(root, cwd, pat, auditor)
- elif kind in ('relglob', 'path', 'rootfilesin'):
- pat = util.normpath(pat)
- elif kind in ('listfile', 'listfile0'):
- try:
- files = util.readfile(pat)
- if kind == 'listfile0':
- files = files.split('\0')
- else:
- files = files.splitlines()
- files = [f for f in files if f]
- except EnvironmentError:
- raise error.Abort(_("unable to read file list (%s)") % pat)
- for k, p, source in self._normalize(files, default, root, cwd,
- auditor):
- kindpats.append((k, p, pat))
- continue
- elif kind == 'include':
- try:
- fullpath = os.path.join(root, util.localpath(pat))
- includepats = readpatternfile(fullpath, self._warn)
- for k, p, source in self._normalize(includepats, default,
- root, cwd, auditor):
- kindpats.append((k, p, source or pat))
- except error.Abort as inst:
- raise error.Abort('%s: %s' % (pat, inst[0]))
- except IOError as inst:
- if self._warn:
- self._warn(_("skipping unreadable pattern file "
- "'%s': %s\n") % (pat, inst.strerror))
- continue
- # else: re or relre - which cannot be normalized
- kindpats.append((kind, pat, ''))
- return kindpats
+class alwaysmatcher(basematcher):
+ '''Matches everything.'''
+
+ def __init__(self, root, cwd, badfn=None, relativeuipath=False):
+ super(alwaysmatcher, self).__init__(root, cwd, badfn,
+ relativeuipath=relativeuipath)
+
+ def always(self):
+ return True
+
+ def matchfn(self, f):
+ return True
+
+ def visitdir(self, dir):
+ return 'all'
+
+ def __repr__(self):
+ return '<alwaysmatcher>'
+
+class nevermatcher(basematcher):
+ '''Matches nothing.'''
+
+ def __init__(self, root, cwd, badfn=None):
+ super(nevermatcher, self).__init__(root, cwd, badfn)
+
+ def __repr__(self):
+ return '<nevermatcher>'
+
+class patternmatcher(basematcher):
+
+ def __init__(self, root, cwd, kindpats, ctx=None, listsubrepos=False,
+ badfn=None):
+ super(patternmatcher, self).__init__(root, cwd, badfn)
+
+ self._files = _explicitfiles(kindpats)
+ self._anypats = _anypats(kindpats)
+ self.patternspat, pm = _buildmatch(ctx, kindpats, '$', listsubrepos,
+ root)
+ self.matchfn = pm
+
+ @propertycache
+ def _dirs(self):
+ return set(util.dirs(self._fileset)) | {'.'}
+
+ def visitdir(self, dir):
+ if self.prefix() and dir in self._fileset:
+ return 'all'
+ return ('.' in self._fileset or
+ dir in self._fileset or
+ dir in self._dirs or
+ any(parentdir in self._fileset
+ for parentdir in util.finddirs(dir)))
+
+ def anypats(self):
+ return self._anypats
+
+ def __repr__(self):
+ return ('<patternmatcher patterns=%r>' % self.patternspat)
+
+class includematcher(basematcher):
+
+ def __init__(self, root, cwd, kindpats, ctx=None, listsubrepos=False,
+ badfn=None):
+ super(includematcher, self).__init__(root, cwd, badfn)
+
+ self.includepat, im = _buildmatch(ctx, kindpats, '(?:/|$)',
+ listsubrepos, root)
+ self._anypats = _anypats(kindpats)
+ roots, dirs = _rootsanddirs(kindpats)
+ # roots are directories which are recursively included.
+ self._roots = set(roots)
+ # dirs are directories which are non-recursively included.
+ self._dirs = set(dirs)
+ self.matchfn = im
+
+ def visitdir(self, dir):
+ if not self._anypats and dir in self._roots:
+ # The condition above is essentially self.prefix() for includes
+ return 'all'
+ return ('.' in self._roots or
+ dir in self._roots or
+ dir in self._dirs or
+ any(parentdir in self._roots
+ for parentdir in util.finddirs(dir)))
+
+ def anypats(self):
+ return True
+
+ def __repr__(self):
+ return ('<includematcher includes=%r>' % self.includepat)
+
+class exactmatcher(basematcher):
+ '''Matches the input files exactly. They are interpreted as paths, not
+ patterns (so no kind-prefixes).
+ '''
+
+ def __init__(self, root, cwd, files, badfn=None):
+ super(exactmatcher, self).__init__(root, cwd, badfn)
+
+ if isinstance(files, list):
+ self._files = files
+ else:
+ self._files = list(files)
+
+ matchfn = basematcher.exact
+
+ @propertycache
+ def _dirs(self):
+ return set(util.dirs(self._fileset)) | {'.'}
+
+ def visitdir(self, dir):
+ return dir in self._dirs
+
+ def isexact(self):
+ return True
+
+ def __repr__(self):
+ return ('<exactmatcher files=%r>' % self._files)
-def exact(root, cwd, files, badfn=None):
- return match(root, cwd, files, exact=True, badfn=badfn)
+class differencematcher(basematcher):
+ '''Composes two matchers by matching if the first matches and the second
+ does not. Well, almost... If the user provides a pattern like "-X foo foo",
+ Mercurial actually does match "foo" against that. That's because exact
+ matches are treated specially. So, since this differencematcher is used for
+ excludes, it needs to special-case exact matching.
+
+ The second matcher's non-matching-attributes (root, cwd, bad, explicitdir,
+ traversedir) are ignored.
+
+ TODO: If we want to keep the behavior described above for exact matches, we
+ should consider instead treating the above case something like this:
+ union(exact(foo), difference(pattern(foo), include(foo)))
+ '''
+ def __init__(self, m1, m2):
+ super(differencematcher, self).__init__(m1._root, m1._cwd)
+ self._m1 = m1
+ self._m2 = m2
+ self.bad = m1.bad
+ self.explicitdir = m1.explicitdir
+ self.traversedir = m1.traversedir
+
+ def matchfn(self, f):
+ return self._m1(f) and (not self._m2(f) or self._m1.exact(f))
-def always(root, cwd):
- return match(root, cwd, [])
+ @propertycache
+ def _files(self):
+ if self.isexact():
+ return [f for f in self._m1.files() if self(f)]
+ # If m1 is not an exact matcher, we can't easily figure out the set of
+ # files, because its files() are not always files. For example, if
+ # m1 is "path:dir" and m2 is "rootfileins:.", we don't
+ # want to remove "dir" from the set even though it would match m2,
+ # because the "dir" in m1 may not be a file.
+ return self._m1.files()
+
+ def visitdir(self, dir):
+ if self._m2.visitdir(dir) == 'all':
+ # There's a bug here: If m1 matches file 'dir/file' and m2 excludes
+ # 'dir' (recursively), we should still visit 'dir' due to the
+ # exception we have for exact matches.
+ return False
+ return bool(self._m1.visitdir(dir))
+
+ def isexact(self):
+ return self._m1.isexact()
+
+ def anypats(self):
+ return self._m1.anypats() or self._m2.anypats()
+
+ def __repr__(self):
+ return ('<differencematcher m1=%r, m2=%r>' % (self._m1, self._m2))
+
+def intersectmatchers(m1, m2):
+ '''Composes two matchers by matching if both of them match.
-def badmatch(match, badfn):
- """Make a copy of the given matcher, replacing its bad method with the given
- one.
- """
- m = copy.copy(match)
- m.bad = badfn
- return m
+ The second matcher's non-matching-attributes (root, cwd, bad, explicitdir,
+ traversedir) are ignored.
+ '''
+ if m1 is None or m2 is None:
+ return m1 or m2
+ if m1.always():
+ m = copy.copy(m2)
+ # TODO: Consider encapsulating these things in a class so there's only
+ # one thing to copy from m1.
+ m.bad = m1.bad
+ m.explicitdir = m1.explicitdir
+ m.traversedir = m1.traversedir
+ m.abs = m1.abs
+ m.rel = m1.rel
+ m._relativeuipath |= m1._relativeuipath
+ return m
+ if m2.always():
+ m = copy.copy(m1)
+ m._relativeuipath |= m2._relativeuipath
+ return m
+ return intersectionmatcher(m1, m2)
+
+class intersectionmatcher(basematcher):
+ def __init__(self, m1, m2):
+ super(intersectionmatcher, self).__init__(m1._root, m1._cwd)
+ self._m1 = m1
+ self._m2 = m2
+ self.bad = m1.bad
+ self.explicitdir = m1.explicitdir
+ self.traversedir = m1.traversedir
-class subdirmatcher(match):
+ @propertycache
+ def _files(self):
+ if self.isexact():
+ m1, m2 = self._m1, self._m2
+ if not m1.isexact():
+ m1, m2 = m2, m1
+ return [f for f in m1.files() if m2(f)]
+ # It neither m1 nor m2 is an exact matcher, we can't easily intersect
+ # the set of files, because their files() are not always files. For
+ # example, if intersecting a matcher "-I glob:foo.txt" with matcher of
+ # "path:dir2", we don't want to remove "dir2" from the set.
+ return self._m1.files() + self._m2.files()
+
+ def matchfn(self, f):
+ return self._m1(f) and self._m2(f)
+
+ def visitdir(self, dir):
+ visit1 = self._m1.visitdir(dir)
+ if visit1 == 'all':
+ return self._m2.visitdir(dir)
+ # bool() because visit1=True + visit2='all' should not be 'all'
+ return bool(visit1 and self._m2.visitdir(dir))
+
+ def always(self):
+ return self._m1.always() and self._m2.always()
+
+ def isexact(self):
+ return self._m1.isexact() or self._m2.isexact()
+
+ def anypats(self):
+ return self._m1.anypats() or self._m2.anypats()
+
+ def __repr__(self):
+ return ('<intersectionmatcher m1=%r, m2=%r>' % (self._m1, self._m2))
+
+class subdirmatcher(basematcher):
"""Adapt a matcher to work on a subdirectory only.
The paths are remapped to remove/insert the path as needed:
@@ -381,79 +594,54 @@
"""
def __init__(self, path, matcher):
- self._root = matcher._root
- self._cwd = matcher._cwd
+ super(subdirmatcher, self).__init__(matcher._root, matcher._cwd)
self._path = path
self._matcher = matcher
- self._always = matcher._always
- self._pathrestricted = matcher._pathrestricted
+ self._always = matcher.always()
self._files = [f[len(path) + 1:] for f in matcher._files
if f.startswith(path + "/")]
- # If the parent repo had a path to this subrepo and no patterns are
- # specified, this submatcher always matches.
- if not self._always and not matcher._anypats:
+ # If the parent repo had a path to this subrepo and the matcher is
+ # a prefix matcher, this submatcher always matches.
+ if matcher.prefix():
self._always = any(f == path for f in matcher._files)
- self._anypats = matcher._anypats
- # Some information is lost in the superclass's constructor, so we
- # can not accurately create the matching function for the subdirectory
- # from the inputs. Instead, we override matchfn() and visitdir() to
- # call the original matcher with the subdirectory path prepended.
- self.matchfn = lambda fn: matcher.matchfn(self._path + "/" + fn)
- def visitdir(dir):
- if dir == '.':
- return matcher.visitdir(self._path)
- return matcher.visitdir(self._path + "/" + dir)
- self.visitdir = visitdir
- self._fileroots = set(self._files)
+ def bad(self, f, msg):
+ self._matcher.bad(self._path + "/" + f, msg)
def abs(self, f):
return self._matcher.abs(self._path + "/" + f)
- def bad(self, f, msg):
- self._matcher.bad(self._path + "/" + f, msg)
-
def rel(self, f):
return self._matcher.rel(self._path + "/" + f)
-class icasefsmatcher(match):
- """A matcher for wdir on case insensitive filesystems, which normalizes the
- given patterns to the case in the filesystem.
- """
+ def uipath(self, f):
+ return self._matcher.uipath(self._path + "/" + f)
- def __init__(self, root, cwd, patterns, include, exclude, default, auditor,
- ctx, listsubrepos=False, badfn=None):
- init = super(icasefsmatcher, self).__init__
- self._dirstate = ctx.repo().dirstate
- self._dsnormalize = self._dirstate.normalize
-
- init(root, cwd, patterns, include, exclude, default, auditor=auditor,
- ctx=ctx, listsubrepos=listsubrepos, badfn=badfn)
+ def matchfn(self, f):
+ # Some information is lost in the superclass's constructor, so we
+ # can not accurately create the matching function for the subdirectory
+ # from the inputs. Instead, we override matchfn() and visitdir() to
+ # call the original matcher with the subdirectory path prepended.
+ return self._matcher.matchfn(self._path + "/" + f)
- # m.exact(file) must be based off of the actual user input, otherwise
- # inexact case matches are treated as exact, and not noted without -v.
- if self._files:
- roots, dirs = _rootsanddirs(self._kp)
- self._fileroots = set(roots)
- self._fileroots.update(dirs)
+ def visitdir(self, dir):
+ if dir == '.':
+ dir = self._path
+ else:
+ dir = self._path + "/" + dir
+ return self._matcher.visitdir(dir)
- def _normalize(self, patterns, default, root, cwd, auditor):
- self._kp = super(icasefsmatcher, self)._normalize(patterns, default,
- root, cwd, auditor)
- kindpats = []
- for kind, pats, source in self._kp:
- if kind not in ('re', 'relre'): # regex can't be normalized
- p = pats
- pats = self._dsnormalize(pats)
+ def always(self):
+ return self._always
- # Preserve the original to handle a case only rename.
- if p != pats and p in self._dirstate:
- kindpats.append((kind, p, source))
+ def anypats(self):
+ return self._matcher.anypats()
- kindpats.append((kind, pats, source))
- return kindpats
+ def __repr__(self):
+ return ('<subdirmatcher path=%r, matcher=%r>' %
+ (self._path, self._matcher))
def patkind(pattern, default=None):
'''If pattern is 'kind:pat' with a known kind, return kind.'''
@@ -584,10 +772,17 @@
subincludes, kindpats = _expandsubinclude(kindpats, root)
if subincludes:
+ submatchers = {}
def matchsubinclude(f):
- for prefix, mf in subincludes:
- if f.startswith(prefix) and mf(f[len(prefix):]):
- return True
+ for prefix, matcherargs in subincludes:
+ if f.startswith(prefix):
+ mf = submatchers.get(prefix)
+ if mf is None:
+ mf = match(*matcherargs)
+ submatchers[prefix] = mf
+
+ if mf(f[len(prefix):]):
+ return True
return False
matchfuncs.append(matchsubinclude)
@@ -677,16 +872,16 @@
>>> _rootsanddirs(\
[('glob', 'g/h/*', ''), ('glob', 'g/h', ''), ('glob', 'g*', '')])
- (['g/h', 'g/h', '.'], ['g'])
+ (['g/h', 'g/h', '.'], ['g', '.'])
>>> _rootsanddirs(\
[('rootfilesin', 'g/h', ''), ('rootfilesin', '', '')])
- ([], ['g/h', '.', 'g'])
+ ([], ['g/h', '.', 'g', '.'])
>>> _rootsanddirs(\
[('relpath', 'r', ''), ('path', 'p/p', ''), ('path', '', '')])
- (['r', 'p/p', '.'], ['p'])
+ (['r', 'p/p', '.'], ['p', '.'])
>>> _rootsanddirs(\
[('relglob', 'rg*', ''), ('re', 're/', ''), ('relre', 'rr', '')])
- (['.', '.', '.'], [])
+ (['.', '.', '.'], ['.'])
'''
r, d = _patternrootsanddirs(kindpats)
@@ -694,6 +889,8 @@
# scanned to get to either the roots or the other exact directories.
d.extend(util.dirs(d))
d.extend(util.dirs(r))
+ # util.dirs() does not include the root directory, so add it manually
+ d.append('.')
return r, d
--- a/mercurial/mdiff.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/mdiff.py Tue Jun 20 16:33:46 2017 -0400
@@ -13,14 +13,21 @@
from .i18n import _
from . import (
- base85,
- bdiff,
error,
- mpatch,
+ policy,
pycompat,
util,
)
+bdiff = policy.importmod(r'bdiff')
+mpatch = policy.importmod(r'mpatch')
+
+blocks = bdiff.blocks
+fixws = bdiff.fixws
+patches = mpatch.patches
+patchedsize = mpatch.patchedsize
+textdiff = bdiff.bdiff
+
def splitnewlines(text):
'''like str.splitlines, but only split on newlines.'''
lines = [l + '\n' for l in text.split('\n')]
@@ -426,7 +433,7 @@
l = chr(ord('A') + l - 1)
else:
l = chr(l - 26 + ord('a') - 1)
- return '%c%s\n' % (l, base85.b85encode(line, True))
+ return '%c%s\n' % (l, util.b85encode(line, True))
def chunk(text, csize=52):
l = len(text)
@@ -478,7 +485,3 @@
def replacediffheader(oldlen, newlen):
return struct.pack(">lll", 0, oldlen, newlen)
-
-patches = mpatch.patches
-patchedsize = mpatch.patchedsize
-textdiff = bdiff.bdiff
--- a/mercurial/merge.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/merge.py Tue Jun 20 16:33:46 2017 -0400
@@ -786,7 +786,7 @@
return True
def manifestmerge(repo, wctx, p2, pa, branchmerge, force, matcher,
- acceptremote, followcopies):
+ acceptremote, followcopies, forcefulldiff=False):
"""
Merge wctx and p2 with ancestor pa and generate merge action list
@@ -801,15 +801,18 @@
# manifests fetched in order are going to be faster, so prime the caches
[x.manifest() for x in
- sorted(wctx.parents() + [p2, pa], key=lambda x: x.rev())]
+ sorted(wctx.parents() + [p2, pa], key=scmutil.intrev)]
if followcopies:
ret = copies.mergecopies(repo, wctx, p2, pa)
copy, movewithdir, diverge, renamedelete, dirmove = ret
+ boolbm = pycompat.bytestr(bool(branchmerge))
+ boolf = pycompat.bytestr(bool(force))
+ boolm = pycompat.bytestr(bool(matcher))
repo.ui.note(_("resolving manifests\n"))
repo.ui.debug(" branchmerge: %s, force: %s, partial: %s\n"
- % (bool(branchmerge), bool(force), bool(matcher)))
+ % (boolbm, boolf, boolm))
repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
@@ -821,6 +824,25 @@
if any(wctx.sub(s).dirty() for s in wctx.substate):
m1['.hgsubstate'] = modifiednodeid
+ # Don't use m2-vs-ma optimization if:
+ # - ma is the same as m1 or m2, which we're just going to diff again later
+ # - The caller specifically asks for a full diff, which is useful during bid
+ # merge.
+ if (pa not in ([wctx, p2] + wctx.parents()) and not forcefulldiff):
+ # Identify which files are relevant to the merge, so we can limit the
+ # total m1-vs-m2 diff to just those files. This has significant
+ # performance benefits in large repositories.
+ relevantfiles = set(ma.diff(m2).keys())
+
+ # For copied and moved files, we need to add the source file too.
+ for copykey, copyvalue in copy.iteritems():
+ if copyvalue in relevantfiles:
+ relevantfiles.add(copykey)
+ for movedirkey in movewithdir:
+ relevantfiles.add(movedirkey)
+ filesmatcher = scmutil.matchfiles(repo, relevantfiles)
+ matcher = matchmod.intersectmatchers(matcher, filesmatcher)
+
diff = m1.diff(m2, match=matcher)
if matcher is None:
@@ -974,7 +996,7 @@
repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor)
actions, diverge1, renamedelete1 = manifestmerge(
repo, wctx, mctx, ancestor, branchmerge, force, matcher,
- acceptremote, followcopies)
+ acceptremote, followcopies, forcefulldiff=True)
_checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce)
# Track the shortest set of warning on the theory that bid
@@ -1268,7 +1290,7 @@
progress(_updating, z, item=f, total=numupdates, unit=_files)
flags, = args
audit(f)
- util.setflags(repo.wjoin(f), 'l' in flags, 'x' in flags)
+ repo.wvfs.setflags(f, 'l' in flags, 'x' in flags)
updated += 1
# the ordering is important here -- ms.mergedriver will raise if the merge
@@ -1676,15 +1698,14 @@
stats = applyupdates(repo, actions, wc, p2, overwrite, labels=labels)
if not partial:
- repo.dirstate.beginparentchange()
- repo.setparents(fp1, fp2)
- recordupdates(repo, actions, branchmerge)
- # update completed, clear state
- util.unlink(repo.vfs.join('updatestate'))
+ with repo.dirstate.parentchange():
+ repo.setparents(fp1, fp2)
+ recordupdates(repo, actions, branchmerge)
+ # update completed, clear state
+ util.unlink(repo.vfs.join('updatestate'))
- if not branchmerge:
- repo.dirstate.setbranch(p2.branch())
- repo.dirstate.endparentchange()
+ if not branchmerge:
+ repo.dirstate.setbranch(p2.branch())
if not partial:
repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
@@ -1722,10 +1743,9 @@
parents.remove(pctx)
pother = parents[0].node()
- repo.dirstate.beginparentchange()
- repo.setparents(repo['.'].node(), pother)
- repo.dirstate.write(repo.currenttransaction())
- # fix up dirstate for copies and renames
- copies.duplicatecopies(repo, ctx.rev(), pctx.rev())
- repo.dirstate.endparentchange()
+ with repo.dirstate.parentchange():
+ repo.setparents(repo['.'].node(), pother)
+ repo.dirstate.write(repo.currenttransaction())
+ # fix up dirstate for copies and renames
+ copies.duplicatecopies(repo, ctx.rev(), pctx.rev())
return stats
--- a/mercurial/minirst.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/minirst.py Tue Jun 20 16:33:46 2017 -0400
@@ -315,7 +315,8 @@
# column markers are ASCII so we can calculate column
# position in bytes
columns = [x for x in xrange(len(div))
- if div[x] == '=' and (x == 0 or div[x - 1] == ' ')]
+ if div[x:x + 1] == '=' and (x == 0 or
+ div[x - 1:x] == ' ')]
rows = []
for l in block['lines'][1:-1]:
if l == div:
@@ -356,7 +357,7 @@
len(block['lines']) == 2 and
encoding.colwidth(block['lines'][0]) == len(block['lines'][1]) and
_sectionre.match(block['lines'][1])):
- block['underline'] = block['lines'][1][0]
+ block['underline'] = block['lines'][1][0:1]
block['type'] = 'section'
del block['lines'][1]
return blocks
@@ -452,7 +453,7 @@
}
def formatoption(block, width):
- desc = ' '.join(map(str.strip, block['lines']))
+ desc = ' '.join(map(bytes.strip, block['lines']))
colwidth = encoding.colwidth(block['optstr'])
usablewidth = width - 1
hanging = block['optstrwidth']
@@ -474,7 +475,7 @@
hang = len(block['lines'][-1]) - len(block['lines'][-1].lstrip())
defindent = indent + hang * ' '
- text = ' '.join(map(str.strip, block['lines']))
+ text = ' '.join(map(bytes.strip, block['lines']))
return '%s\n%s\n' % (indent + admonition,
util.wrap(text, width=width,
initindent=defindent,
@@ -512,7 +513,7 @@
term = indent + block['lines'][0]
hang = len(block['lines'][-1]) - len(block['lines'][-1].lstrip())
defindent = indent + hang * ' '
- text = ' '.join(map(str.strip, block['lines'][1:]))
+ text = ' '.join(map(bytes.strip, block['lines'][1:]))
return '%s\n%s\n' % (term, util.wrap(text, width=width,
initindent=defindent,
hangindent=defindent))
@@ -567,7 +568,7 @@
if btype == 'admonition':
admonition = escape(_admonitiontitles[b['admonitiontitle']])
- text = escape(' '.join(map(str.strip, lines)))
+ text = escape(' '.join(map(bytes.strip, lines)))
out.append('<p>\n<b>%s</b> %s\n</p>\n' % (admonition, text))
elif btype == 'paragraph':
out.append('<p>\n%s\n</p>\n' % escape('\n'.join(lines)))
@@ -597,7 +598,7 @@
elif btype == 'definition':
openlist('dl', level)
term = escape(lines[0])
- text = escape(' '.join(map(str.strip, lines[1:])))
+ text = escape(' '.join(map(bytes.strip, lines[1:])))
out.append(' <dt>%s\n <dd>%s\n' % (term, text))
elif btype == 'bullet':
bullet, head = lines[0].split(' ', 1)
@@ -609,12 +610,12 @@
elif btype == 'field':
openlist('dl', level)
key = escape(b['key'])
- text = escape(' '.join(map(str.strip, lines)))
+ text = escape(' '.join(map(bytes.strip, lines)))
out.append(' <dt>%s\n <dd>%s\n' % (key, text))
elif btype == 'option':
openlist('dl', level)
opt = escape(b['optstr'])
- desc = escape(' '.join(map(str.strip, lines)))
+ desc = escape(' '.join(map(bytes.strip, lines)))
out.append(' <dt>%s\n <dd>%s\n' % (opt, desc))
# close lists if indent level of next block is lower
--- a/mercurial/mpatch_module.c Tue Jun 13 22:24:41 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,195 +0,0 @@
-/*
- mpatch.c - efficient binary patching for Mercurial
-
- This implements a patch algorithm that's O(m + nlog n) where m is the
- size of the output and n is the number of patches.
-
- Given a list of binary patches, it unpacks each into a hunk list,
- then combines the hunk lists with a treewise recursion to form a
- single hunk list. This hunk list is then applied to the original
- text.
-
- The text (or binary) fragments are copied directly from their source
- Python objects into a preallocated output string to avoid the
- allocation of intermediate Python objects. Working memory is about 2x
- the total number of hunks.
-
- Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
-
- This software may be used and distributed according to the terms
- of the GNU General Public License, incorporated herein by reference.
-*/
-
-#define PY_SSIZE_T_CLEAN
-#include <Python.h>
-#include <stdlib.h>
-#include <string.h>
-
-#include "util.h"
-#include "bitmanipulation.h"
-#include "compat.h"
-#include "mpatch.h"
-
-static char mpatch_doc[] = "Efficient binary patching.";
-static PyObject *mpatch_Error;
-
-static void setpyerr(int r)
-{
- switch (r) {
- case MPATCH_ERR_NO_MEM:
- PyErr_NoMemory();
- break;
- case MPATCH_ERR_CANNOT_BE_DECODED:
- PyErr_SetString(mpatch_Error, "patch cannot be decoded");
- break;
- case MPATCH_ERR_INVALID_PATCH:
- PyErr_SetString(mpatch_Error, "invalid patch");
- break;
- }
-}
-
-struct mpatch_flist *cpygetitem(void *bins, ssize_t pos)
-{
- const char *buffer;
- struct mpatch_flist *res;
- ssize_t blen;
- int r;
-
- PyObject *tmp = PyList_GetItem((PyObject*)bins, pos);
- if (!tmp)
- return NULL;
- if (PyObject_AsCharBuffer(tmp, &buffer, (Py_ssize_t*)&blen))
- return NULL;
- if ((r = mpatch_decode(buffer, blen, &res)) < 0) {
- if (!PyErr_Occurred())
- setpyerr(r);
- return NULL;
- }
- return res;
-}
-
-static PyObject *
-patches(PyObject *self, PyObject *args)
-{
- PyObject *text, *bins, *result;
- struct mpatch_flist *patch;
- const char *in;
- int r = 0;
- char *out;
- Py_ssize_t len, outlen, inlen;
-
- if (!PyArg_ParseTuple(args, "OO:mpatch", &text, &bins))
- return NULL;
-
- len = PyList_Size(bins);
- if (!len) {
- /* nothing to do */
- Py_INCREF(text);
- return text;
- }
-
- if (PyObject_AsCharBuffer(text, &in, &inlen))
- return NULL;
-
- patch = mpatch_fold(bins, cpygetitem, 0, len);
- if (!patch) { /* error already set or memory error */
- if (!PyErr_Occurred())
- PyErr_NoMemory();
- return NULL;
- }
-
- outlen = mpatch_calcsize(inlen, patch);
- if (outlen < 0) {
- r = (int)outlen;
- result = NULL;
- goto cleanup;
- }
- result = PyBytes_FromStringAndSize(NULL, outlen);
- if (!result) {
- result = NULL;
- goto cleanup;
- }
- out = PyBytes_AsString(result);
- if ((r = mpatch_apply(out, in, inlen, patch)) < 0) {
- Py_DECREF(result);
- result = NULL;
- }
-cleanup:
- mpatch_lfree(patch);
- if (!result && !PyErr_Occurred())
- setpyerr(r);
- return result;
-}
-
-/* calculate size of a patched file directly */
-static PyObject *
-patchedsize(PyObject *self, PyObject *args)
-{
- long orig, start, end, len, outlen = 0, last = 0, pos = 0;
- Py_ssize_t patchlen;
- char *bin;
-
- if (!PyArg_ParseTuple(args, "ls#", &orig, &bin, &patchlen))
- return NULL;
-
- while (pos >= 0 && pos < patchlen) {
- start = getbe32(bin + pos);
- end = getbe32(bin + pos + 4);
- len = getbe32(bin + pos + 8);
- if (start > end)
- break; /* sanity check */
- pos += 12 + len;
- outlen += start - last;
- last = end;
- outlen += len;
- }
-
- if (pos != patchlen) {
- if (!PyErr_Occurred())
- PyErr_SetString(mpatch_Error, "patch cannot be decoded");
- return NULL;
- }
-
- outlen += orig - last;
- return Py_BuildValue("l", outlen);
-}
-
-static PyMethodDef methods[] = {
- {"patches", patches, METH_VARARGS, "apply a series of patches\n"},
- {"patchedsize", patchedsize, METH_VARARGS, "calculed patched size\n"},
- {NULL, NULL}
-};
-
-#ifdef IS_PY3K
-static struct PyModuleDef mpatch_module = {
- PyModuleDef_HEAD_INIT,
- "mpatch",
- mpatch_doc,
- -1,
- methods
-};
-
-PyMODINIT_FUNC PyInit_mpatch(void)
-{
- PyObject *m;
-
- m = PyModule_Create(&mpatch_module);
- if (m == NULL)
- return NULL;
-
- mpatch_Error = PyErr_NewException("mercurial.mpatch.mpatchError",
- NULL, NULL);
- Py_INCREF(mpatch_Error);
- PyModule_AddObject(m, "mpatchError", mpatch_Error);
-
- return m;
-}
-#else
-PyMODINIT_FUNC
-initmpatch(void)
-{
- Py_InitModule3("mpatch", methods, mpatch_doc);
- mpatch_Error = PyErr_NewException("mercurial.mpatch.mpatchError",
- NULL, NULL);
-}
-#endif
--- a/mercurial/namespaces.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/namespaces.py Tue Jun 20 16:33:46 2017 -0400
@@ -46,7 +46,7 @@
logfmt=_("tag: %s\n"),
listnames=tagnames,
namemap=tagnamemap, nodemap=tagnodemap,
- deprecated=set(['tip']))
+ deprecated={'tip'})
self.addnamespace(n)
bnames = lambda repo: repo.branchmap().keys()
@@ -66,9 +66,11 @@
def __iter__(self):
return self._names.__iter__()
- def iteritems(self):
+ def items(self):
return self._names.iteritems()
+ iteritems = items
+
def addnamespace(self, namespace, order=None):
"""register a namespace
--- a/mercurial/node.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/node.py Tue Jun 20 16:33:46 2017 -0400
@@ -23,12 +23,13 @@
addednodeid = ('0' * 15) + 'added'
modifiednodeid = ('0' * 12) + 'modified'
-wdirnodes = set((newnodeid, addednodeid, modifiednodeid))
+wdirnodes = {newnodeid, addednodeid, modifiednodeid}
# pseudo identifiers for working directory
# (they are experimental, so don't add too many dependencies on them)
wdirrev = 0x7fffffff
wdirid = b"\xff" * 20
+wdirhex = hex(wdirid)
def short(node):
return hex(node[:6])
--- a/mercurial/obsolete.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/obsolete.py Tue Jun 20 16:33:46 2017 -0400
@@ -74,14 +74,15 @@
from .i18n import _
from . import (
- base85,
error,
node,
- parsers,
phases,
+ policy,
util,
)
+parsers = policy.importmod(r'parsers')
+
_pack = struct.pack
_unpack = struct.unpack
_calcsize = struct.calcsize
@@ -96,6 +97,27 @@
allowunstableopt = 'allowunstable'
exchangeopt = 'exchange'
+def isenabled(repo, option):
+ """Returns True if the given repository has the given obsolete option
+ enabled.
+ """
+ result = set(repo.ui.configlist('experimental', 'evolution'))
+ if 'all' in result:
+ return True
+
+ # For migration purposes, temporarily return true if the config hasn't been
+ # set but _enabled is true.
+ if len(result) == 0 and _enabled:
+ return True
+
+ # createmarkers must be enabled if other options are enabled
+ if ((allowunstableopt in result or exchangeopt in result) and
+ not createmarkersopt in result):
+ raise error.Abort(_("'createmarkers' obsolete option must be enabled "
+ "if other obsolete options are enabled"))
+
+ return option in result
+
### obsolescence marker flag
## bumpedfix flag
@@ -218,8 +240,8 @@
if not parents:
# mark that we explicitly recorded no parents
metadata['p0'] = ''
- for i, p in enumerate(parents):
- metadata['p%i' % (i + 1)] = node.hex(p)
+ for i, p in enumerate(parents, 1):
+ metadata['p%i' % i] = node.hex(p)
metadata = _fm0encodemeta(metadata)
numsuc = len(sucs)
format = _fm0fixed + (_fm0node * numsuc)
@@ -417,23 +439,28 @@
formats = {_fm0version: (_fm0readmarkers, _fm0encodeonemarker),
_fm1version: (_fm1readmarkers, _fm1encodeonemarker)}
+def _readmarkerversion(data):
+ return _unpack('>B', data[0:1])[0]
+
@util.nogc
def _readmarkers(data):
"""Read and enumerate markers from raw data"""
- off = 0
- diskversion = _unpack('>B', data[off:off + 1])[0]
- off += 1
+ diskversion = _readmarkerversion(data)
+ off = 1
if diskversion not in formats:
- raise error.Abort(_('parsing obsolete marker: unknown version %r')
- % diskversion)
+ msg = _('parsing obsolete marker: unknown version %r') % diskversion
+ raise error.UnknownVersion(msg, version=diskversion)
return diskversion, formats[diskversion][0](data, off)
+def encodeheader(version=_fm0version):
+ return _pack('>B', version)
+
def encodemarkers(markers, addheader=False, version=_fm0version):
# Kept separate from flushmarkers(), it will be reused for
# markers exchange.
encodeone = formats[version][1]
if addheader:
- yield _pack('>B', version)
+ yield encodeheader(version)
for marker in markers:
yield encodeone(marker)
@@ -531,7 +558,7 @@
# caches for various obsolescence related cache
self.caches = {}
self.svfs = svfs
- self._version = defaultformat
+ self._defaultformat = defaultformat
self._readonly = readonly
def __iter__(self):
@@ -562,7 +589,7 @@
return self._readonly
def create(self, transaction, prec, succs=(), flag=0, parents=None,
- date=None, metadata=None):
+ date=None, metadata=None, ui=None):
"""obsolete: add a new obsolete marker
* ensuring it is hashable
@@ -581,6 +608,10 @@
if 'date' in metadata:
# as a courtesy for out-of-tree extensions
date = util.parsedate(metadata.pop('date'))
+ elif ui is not None:
+ date = ui.configdate('devel', 'default-date')
+ if date is None:
+ date = util.makedate()
else:
date = util.makedate()
if len(prec) != 20:
@@ -604,10 +635,11 @@
if self._readonly:
raise error.Abort(_('creating obsolete markers is not enabled on '
'this repo'))
- known = set(self._all)
+ known = set()
+ getsuccessors = self.successors.get
new = []
for m in markers:
- if m not in known:
+ if m not in getsuccessors(m[0], ()) and m not in known:
known.add(m)
new.append(m)
if new:
@@ -638,8 +670,19 @@
return self.add(transaction, markers)
@propertycache
+ def _data(self):
+ return self.svfs.tryread('obsstore')
+
+ @propertycache
+ def _version(self):
+ if len(self._data) >= 1:
+ return _readmarkerversion(self._data)
+ else:
+ return self._defaultformat
+
+ @propertycache
def _all(self):
- data = self.svfs.tryread('obsstore')
+ data = self._data
if not data:
return []
self._version, markers = _readmarkers(data)
@@ -694,6 +737,7 @@
seenmarkers = set()
seennodes = set(pendingnodes)
precursorsmarkers = self.precursors
+ succsmarkers = self.successors
children = self.children
while pendingnodes:
direct = set()
@@ -701,6 +745,8 @@
direct.update(precursorsmarkers.get(current, ()))
pruned = [m for m in children.get(current, ()) if not m[1]]
direct.update(pruned)
+ pruned = [m for m in succsmarkers.get(current, ()) if not m[1]]
+ direct.update(pruned)
direct -= seenmarkers
pendingnodes = set([m[0] for m in direct])
seenmarkers |= direct
@@ -708,6 +754,147 @@
seennodes |= pendingnodes
return seenmarkers
+def makestore(ui, repo):
+ """Create an obsstore instance from a repo."""
+ # read default format for new obsstore.
+ # developer config: format.obsstore-version
+ defaultformat = ui.configint('format', 'obsstore-version', None)
+ # rely on obsstore class default when possible.
+ kwargs = {}
+ if defaultformat is not None:
+ kwargs['defaultformat'] = defaultformat
+ readonly = not isenabled(repo, createmarkersopt)
+ store = obsstore(repo.svfs, readonly=readonly, **kwargs)
+ if store and readonly:
+ ui.warn(_('obsolete feature not enabled but %i markers found!\n')
+ % len(list(store)))
+ return store
+
+def _filterprunes(markers):
+ """return a set with no prune markers"""
+ return set(m for m in markers if m[1])
+
+def exclusivemarkers(repo, nodes):
+ """set of markers relevant to "nodes" but no other locally-known nodes
+
+ This function computes the set of markers "exclusive" to a locally-known
+ node. This means we walk the markers starting from <nodes> until we reach
+ locally-known precursors outside of <nodes>. Elements of <nodes> with
+ locally-known successors outside of <nodes> are ignored (since their
+ precursors markers are also relevant to these successors).
+
+ For example:
+
+ # (A0 rewritten as A1)
+ #
+ # A0 <-1- A1 # Marker "1" is exclusive to A1
+
+ or
+
+ # (A0 rewritten as AX; AX rewritten as A1; AX is unknown locally)
+ #
+ # <-1- A0 <-2- AX <-3- A1 # Marker "2,3" are exclusive to A1
+
+ or
+
+ # (A0 has unknown precursors, A0 rewritten as A1 and A2 (divergence))
+ #
+ # <-2- A1 # Marker "2" is exclusive to A0,A1
+ # /
+ # <-1- A0
+ # \
+ # <-3- A2 # Marker "3" is exclusive to A0,A2
+ #
+ # in addition:
+ #
+ # Markers "2,3" are exclusive to A1,A2
+ # Markers "1,2,3" are exclusive to A0,A1,A2
+
+ See test/test-obsolete-bundle-strip.t for more examples.
+
+ An example usage is strip. When stripping a changeset, we also want to
+ strip the markers exclusive to this changeset. Otherwise we would have
+ "dangling"" obsolescence markers from its precursors: Obsolescence markers
+ marking a node as obsolete without any successors available locally.
+
+ As for relevant markers, the prune markers for children will be followed.
+ Of course, they will only be followed if the pruned child is
+ locally-known, since the prune markers are relevant to the pruned node.
+ However, while prune markers are considered relevant to the parent of the
+ pruned changesets, prune markers for locally-known changeset (with no
+ successors) are considered exclusive to the pruned nodes. This allows
+ to strip the prune markers (with the rest of the exclusive chain) alongside
+ the pruned changesets.
+ """
+ # running on a filtered repository would be dangerous as markers could be
+ # reported as exclusive when they are relevant for other filtered nodes.
+ unfi = repo.unfiltered()
+
+ # shortcut to various useful item
+ nm = unfi.changelog.nodemap
+ precursorsmarkers = unfi.obsstore.precursors
+ successormarkers = unfi.obsstore.successors
+ childrenmarkers = unfi.obsstore.children
+
+ # exclusive markers (return of the function)
+ exclmarkers = set()
+ # we need fast membership testing
+ nodes = set(nodes)
+ # looking for head in the obshistory
+ #
+ # XXX we are ignoring all issues in regard with cycle for now.
+ stack = [n for n in nodes if not _filterprunes(successormarkers.get(n, ()))]
+ stack.sort()
+ # nodes already stacked
+ seennodes = set(stack)
+ while stack:
+ current = stack.pop()
+ # fetch precursors markers
+ markers = list(precursorsmarkers.get(current, ()))
+ # extend the list with prune markers
+ for mark in successormarkers.get(current, ()):
+ if not mark[1]:
+ markers.append(mark)
+ # and markers from children (looking for prune)
+ for mark in childrenmarkers.get(current, ()):
+ if not mark[1]:
+ markers.append(mark)
+ # traverse the markers
+ for mark in markers:
+ if mark in exclmarkers:
+ # markers already selected
+ continue
+
+ # If the markers is about the current node, select it
+ #
+ # (this delay the addition of markers from children)
+ if mark[1] or mark[0] == current:
+ exclmarkers.add(mark)
+
+ # should we keep traversing through the precursors?
+ prec = mark[0]
+
+ # nodes in the stack or already processed
+ if prec in seennodes:
+ continue
+
+ # is this a locally known node ?
+ known = prec in nm
+ # if locally-known and not in the <nodes> set the traversal
+ # stop here.
+ if known and prec not in nodes:
+ continue
+
+ # do not keep going if there are unselected markers pointing to this
+ # nodes. If we end up traversing these unselected markers later the
+ # node will be taken care of at that point.
+ precmarkers = _filterprunes(successormarkers.get(prec))
+ if precmarkers.issubset(exclmarkers):
+ seennodes.add(prec)
+ stack.append(prec)
+
+ return exclmarkers
+
def commonversion(versions):
"""Return the newest version listed in both versions and our local formats.
@@ -744,7 +931,7 @@
currentlen += len(nextdata)
for idx, part in enumerate(reversed(parts)):
data = ''.join([_pack('>B', _fm0version)] + part)
- keys['dump%i' % idx] = base85.b85encode(data)
+ keys['dump%i' % idx] = util.b85encode(data)
return keys
def listmarkers(repo):
@@ -757,11 +944,11 @@
"""Push markers over pushkey"""
if not key.startswith('dump'):
repo.ui.warn(_('unknown key: %r') % key)
- return 0
+ return False
if old:
repo.ui.warn(_('unexpected old value for %r') % key)
- return 0
- data = base85.b85decode(new)
+ return False
+ data = util.b85decode(new)
lock = repo.lock()
try:
tr = repo.transaction('pushkey: obsolete markers')
@@ -769,19 +956,21 @@
repo.obsstore.mergemarkers(tr, data)
repo.invalidatevolatilesets()
tr.close()
- return 1
+ return True
finally:
tr.release()
finally:
lock.release()
-def getmarkers(repo, nodes=None):
+def getmarkers(repo, nodes=None, exclusive=False):
"""returns markers known in a repository
If <nodes> is specified, only markers "relevant" to those nodes are are
returned"""
if nodes is None:
rawmarkers = repo.obsstore
+ elif exclusive:
+ rawmarkers = exclusivemarkers(repo, nodes)
else:
rawmarkers = repo.obsstore.relevantmarkers(nodes)
@@ -1084,7 +1273,9 @@
def cachefor(name):
"""Decorator to register a function as computing the cache for a set"""
def decorator(func):
- assert name not in cachefuncs
+ if name in cachefuncs:
+ msg = "duplicated registration for volatileset '%s' (existing: %r)"
+ raise error.ProgrammingError(msg % (name, cachefuncs[name]))
cachefuncs[name] = func
return func
return decorator
@@ -1121,12 +1312,10 @@
@cachefor('obsolete')
def _computeobsoleteset(repo):
"""the set of obsolete revisions"""
- obs = set()
getnode = repo.changelog.node
notpublic = repo._phasecache.getrevset(repo, (phases.draft, phases.secret))
- for r in notpublic:
- if getnode(r) in repo.obsstore.successors:
- obs.add(r)
+ isobs = repo.obsstore.successors.__contains__
+ obs = set(r for r in notpublic if isobs(getnode(r)))
return obs
@cachefor('unstable')
@@ -1205,7 +1394,8 @@
return divergent
-def createmarkers(repo, relations, flag=0, date=None, metadata=None):
+def createmarkers(repo, relations, flag=0, date=None, metadata=None,
+ operation=None):
"""Add obsolete markers between changesets in a repo
<relations> must be an iterable of (<old>, (<new>, ...)[,{metadata}])
@@ -1226,6 +1416,11 @@
metadata = {}
if 'user' not in metadata:
metadata['user'] = repo.ui.username()
+ useoperation = repo.ui.configbool('experimental',
+ 'evolution.track-operation',
+ False)
+ if useoperation and operation:
+ metadata['operation'] = operation
tr = repo.transaction('add-obsolescence-marker')
try:
markerargs = []
@@ -1258,29 +1453,9 @@
for args in markerargs:
nprec, nsucs, npare, localmetadata = args
repo.obsstore.create(tr, nprec, nsucs, flag, parents=npare,
- date=date, metadata=localmetadata)
+ date=date, metadata=localmetadata,
+ ui=repo.ui)
repo.filteredrevcache.clear()
tr.close()
finally:
tr.release()
-
-def isenabled(repo, option):
- """Returns True if the given repository has the given obsolete option
- enabled.
- """
- result = set(repo.ui.configlist('experimental', 'evolution'))
- if 'all' in result:
- return True
-
- # For migration purposes, temporarily return true if the config hasn't been
- # set but _enabled is true.
- if len(result) == 0 and _enabled:
- return True
-
- # createmarkers must be enabled if other options are enabled
- if ((allowunstableopt in result or exchangeopt in result) and
- not createmarkersopt in result):
- raise error.Abort(_("'createmarkers' obsolete option must be enabled "
- "if other obsolete options are enabled"))
-
- return option in result
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/obsutil.py Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,36 @@
+# obsutil.py - utility functions for obsolescence
+#
+# Copyright 2017 Boris Feld <boris.feld@octobus.net>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+def closestpredecessors(repo, nodeid):
+ """yield the list of next predecessors pointing on visible changectx nodes
+
+ This function respects the repoview filtering; filtered revisions will be
+ considered missing.
+ """
+
+ precursors = repo.obsstore.precursors
+ stack = [nodeid]
+ seen = set(stack)
+
+ while stack:
+ current = stack.pop()
+ currentpreccs = precursors.get(current, ())
+
+ for prec in currentpreccs:
+ precnodeid = prec[0]
+
+ # Basic cycle protection
+ if precnodeid in seen:
+ continue
+ seen.add(precnodeid)
+
+ if precnodeid in repo:
+ yield precnodeid
+ else:
+ stack.append(precnodeid)
--- a/mercurial/osutil.c Tue Jun 13 22:24:41 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1328 +0,0 @@
-/*
- osutil.c - native operating system services
-
- Copyright 2007 Matt Mackall and others
-
- This software may be used and distributed according to the terms of
- the GNU General Public License, incorporated herein by reference.
-*/
-
-#define _ATFILE_SOURCE
-#include <Python.h>
-#include <fcntl.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <errno.h>
-
-#ifdef _WIN32
-#include <windows.h>
-#include <io.h>
-#else
-#include <dirent.h>
-#include <sys/socket.h>
-#include <sys/stat.h>
-#include <sys/types.h>
-#include <unistd.h>
-#ifdef HAVE_LINUX_STATFS
-#include <linux/magic.h>
-#include <sys/vfs.h>
-#endif
-#ifdef HAVE_BSD_STATFS
-#include <sys/mount.h>
-#include <sys/param.h>
-#endif
-#endif
-
-#ifdef __APPLE__
-#include <sys/attr.h>
-#include <sys/vnode.h>
-#endif
-
-#include "util.h"
-
-/* some platforms lack the PATH_MAX definition (eg. GNU/Hurd) */
-#ifndef PATH_MAX
-#define PATH_MAX 4096
-#endif
-
-#ifdef _WIN32
-/*
-stat struct compatible with hg expectations
-Mercurial only uses st_mode, st_size and st_mtime
-the rest is kept to minimize changes between implementations
-*/
-struct hg_stat {
- int st_dev;
- int st_mode;
- int st_nlink;
- __int64 st_size;
- int st_mtime;
- int st_ctime;
-};
-struct listdir_stat {
- PyObject_HEAD
- struct hg_stat st;
-};
-#else
-struct listdir_stat {
- PyObject_HEAD
- struct stat st;
-};
-#endif
-
-#ifdef IS_PY3K
-#define listdir_slot(name) \
- static PyObject *listdir_stat_##name(PyObject *self, void *x) \
- { \
- return PyLong_FromLong(((struct listdir_stat *)self)->st.name); \
- }
-#else
-#define listdir_slot(name) \
- static PyObject *listdir_stat_##name(PyObject *self, void *x) \
- { \
- return PyInt_FromLong(((struct listdir_stat *)self)->st.name); \
- }
-#endif
-
-listdir_slot(st_dev)
-listdir_slot(st_mode)
-listdir_slot(st_nlink)
-#ifdef _WIN32
-static PyObject *listdir_stat_st_size(PyObject *self, void *x)
-{
- return PyLong_FromLongLong(
- (PY_LONG_LONG)((struct listdir_stat *)self)->st.st_size);
-}
-#else
-listdir_slot(st_size)
-#endif
-listdir_slot(st_mtime)
-listdir_slot(st_ctime)
-
-static struct PyGetSetDef listdir_stat_getsets[] = {
- {"st_dev", listdir_stat_st_dev, 0, 0, 0},
- {"st_mode", listdir_stat_st_mode, 0, 0, 0},
- {"st_nlink", listdir_stat_st_nlink, 0, 0, 0},
- {"st_size", listdir_stat_st_size, 0, 0, 0},
- {"st_mtime", listdir_stat_st_mtime, 0, 0, 0},
- {"st_ctime", listdir_stat_st_ctime, 0, 0, 0},
- {0, 0, 0, 0, 0}
-};
-
-static PyObject *listdir_stat_new(PyTypeObject *t, PyObject *a, PyObject *k)
-{
- return t->tp_alloc(t, 0);
-}
-
-static void listdir_stat_dealloc(PyObject *o)
-{
- o->ob_type->tp_free(o);
-}
-
-static PyTypeObject listdir_stat_type = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "osutil.stat", /*tp_name*/
- sizeof(struct listdir_stat), /*tp_basicsize*/
- 0, /*tp_itemsize*/
- (destructor)listdir_stat_dealloc, /*tp_dealloc*/
- 0, /*tp_print*/
- 0, /*tp_getattr*/
- 0, /*tp_setattr*/
- 0, /*tp_compare*/
- 0, /*tp_repr*/
- 0, /*tp_as_number*/
- 0, /*tp_as_sequence*/
- 0, /*tp_as_mapping*/
- 0, /*tp_hash */
- 0, /*tp_call*/
- 0, /*tp_str*/
- 0, /*tp_getattro*/
- 0, /*tp_setattro*/
- 0, /*tp_as_buffer*/
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/
- "stat objects", /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
- listdir_stat_getsets, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- 0, /* tp_init */
- 0, /* tp_alloc */
- listdir_stat_new, /* tp_new */
-};
-
-#ifdef _WIN32
-
-static int to_python_time(const FILETIME *tm)
-{
- /* number of seconds between epoch and January 1 1601 */
- const __int64 a0 = (__int64)134774L * (__int64)24L * (__int64)3600L;
- /* conversion factor from 100ns to 1s */
- const __int64 a1 = 10000000;
- /* explicit (int) cast to suspend compiler warnings */
- return (int)((((__int64)tm->dwHighDateTime << 32)
- + tm->dwLowDateTime) / a1 - a0);
-}
-
-static PyObject *make_item(const WIN32_FIND_DATAA *fd, int wantstat)
-{
- PyObject *py_st;
- struct hg_stat *stp;
-
- int kind = (fd->dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY)
- ? _S_IFDIR : _S_IFREG;
-
- if (!wantstat)
- return Py_BuildValue("si", fd->cFileName, kind);
-
- py_st = PyObject_CallObject((PyObject *)&listdir_stat_type, NULL);
- if (!py_st)
- return NULL;
-
- stp = &((struct listdir_stat *)py_st)->st;
- /*
- use kind as st_mode
- rwx bits on Win32 are meaningless
- and Hg does not use them anyway
- */
- stp->st_mode = kind;
- stp->st_mtime = to_python_time(&fd->ftLastWriteTime);
- stp->st_ctime = to_python_time(&fd->ftCreationTime);
- if (kind == _S_IFREG)
- stp->st_size = ((__int64)fd->nFileSizeHigh << 32)
- + fd->nFileSizeLow;
- return Py_BuildValue("siN", fd->cFileName,
- kind, py_st);
-}
-
-static PyObject *_listdir(char *path, int plen, int wantstat, char *skip)
-{
- PyObject *rval = NULL; /* initialize - return value */
- PyObject *list;
- HANDLE fh;
- WIN32_FIND_DATAA fd;
- char *pattern;
-
- /* build the path + \* pattern string */
- pattern = PyMem_Malloc(plen + 3); /* path + \* + \0 */
- if (!pattern) {
- PyErr_NoMemory();
- goto error_nomem;
- }
- memcpy(pattern, path, plen);
-
- if (plen > 0) {
- char c = path[plen-1];
- if (c != ':' && c != '/' && c != '\\')
- pattern[plen++] = '\\';
- }
- pattern[plen++] = '*';
- pattern[plen] = '\0';
-
- fh = FindFirstFileA(pattern, &fd);
- if (fh == INVALID_HANDLE_VALUE) {
- PyErr_SetFromWindowsErrWithFilename(GetLastError(), path);
- goto error_file;
- }
-
- list = PyList_New(0);
- if (!list)
- goto error_list;
-
- do {
- PyObject *item;
-
- if (fd.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) {
- if (!strcmp(fd.cFileName, ".")
- || !strcmp(fd.cFileName, ".."))
- continue;
-
- if (skip && !strcmp(fd.cFileName, skip)) {
- rval = PyList_New(0);
- goto error;
- }
- }
-
- item = make_item(&fd, wantstat);
- if (!item)
- goto error;
-
- if (PyList_Append(list, item)) {
- Py_XDECREF(item);
- goto error;
- }
-
- Py_XDECREF(item);
- } while (FindNextFileA(fh, &fd));
-
- if (GetLastError() != ERROR_NO_MORE_FILES) {
- PyErr_SetFromWindowsErrWithFilename(GetLastError(), path);
- goto error;
- }
-
- rval = list;
- Py_XINCREF(rval);
-error:
- Py_XDECREF(list);
-error_list:
- FindClose(fh);
-error_file:
- PyMem_Free(pattern);
-error_nomem:
- return rval;
-}
-
-#else
-
-int entkind(struct dirent *ent)
-{
-#ifdef DT_REG
- switch (ent->d_type) {
- case DT_REG: return S_IFREG;
- case DT_DIR: return S_IFDIR;
- case DT_LNK: return S_IFLNK;
- case DT_BLK: return S_IFBLK;
- case DT_CHR: return S_IFCHR;
- case DT_FIFO: return S_IFIFO;
- case DT_SOCK: return S_IFSOCK;
- }
-#endif
- return -1;
-}
-
-static PyObject *makestat(const struct stat *st)
-{
- PyObject *stat;
-
- stat = PyObject_CallObject((PyObject *)&listdir_stat_type, NULL);
- if (stat)
- memcpy(&((struct listdir_stat *)stat)->st, st, sizeof(*st));
- return stat;
-}
-
-static PyObject *_listdir_stat(char *path, int pathlen, int keepstat,
- char *skip)
-{
- PyObject *list, *elem, *stat = NULL, *ret = NULL;
- char fullpath[PATH_MAX + 10];
- int kind, err;
- struct stat st;
- struct dirent *ent;
- DIR *dir;
-#ifdef AT_SYMLINK_NOFOLLOW
- int dfd = -1;
-#endif
-
- if (pathlen >= PATH_MAX) {
- errno = ENAMETOOLONG;
- PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
- goto error_value;
- }
- strncpy(fullpath, path, PATH_MAX);
- fullpath[pathlen] = '/';
-
-#ifdef AT_SYMLINK_NOFOLLOW
- dfd = open(path, O_RDONLY);
- if (dfd == -1) {
- PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
- goto error_value;
- }
- dir = fdopendir(dfd);
-#else
- dir = opendir(path);
-#endif
- if (!dir) {
- PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
- goto error_dir;
- }
-
- list = PyList_New(0);
- if (!list)
- goto error_list;
-
- while ((ent = readdir(dir))) {
- if (!strcmp(ent->d_name, ".") || !strcmp(ent->d_name, ".."))
- continue;
-
- kind = entkind(ent);
- if (kind == -1 || keepstat) {
-#ifdef AT_SYMLINK_NOFOLLOW
- err = fstatat(dfd, ent->d_name, &st,
- AT_SYMLINK_NOFOLLOW);
-#else
- strncpy(fullpath + pathlen + 1, ent->d_name,
- PATH_MAX - pathlen);
- fullpath[PATH_MAX] = '\0';
- err = lstat(fullpath, &st);
-#endif
- if (err == -1) {
- /* race with file deletion? */
- if (errno == ENOENT)
- continue;
- strncpy(fullpath + pathlen + 1, ent->d_name,
- PATH_MAX - pathlen);
- fullpath[PATH_MAX] = 0;
- PyErr_SetFromErrnoWithFilename(PyExc_OSError,
- fullpath);
- goto error;
- }
- kind = st.st_mode & S_IFMT;
- }
-
- /* quit early? */
- if (skip && kind == S_IFDIR && !strcmp(ent->d_name, skip)) {
- ret = PyList_New(0);
- goto error;
- }
-
- if (keepstat) {
- stat = makestat(&st);
- if (!stat)
- goto error;
- elem = Py_BuildValue("siN", ent->d_name, kind, stat);
- } else
- elem = Py_BuildValue("si", ent->d_name, kind);
- if (!elem)
- goto error;
- stat = NULL;
-
- PyList_Append(list, elem);
- Py_DECREF(elem);
- }
-
- ret = list;
- Py_INCREF(ret);
-
-error:
- Py_DECREF(list);
- Py_XDECREF(stat);
-error_list:
- closedir(dir);
- /* closedir also closes its dirfd */
- goto error_value;
-error_dir:
-#ifdef AT_SYMLINK_NOFOLLOW
- close(dfd);
-#endif
-error_value:
- return ret;
-}
-
-#ifdef __APPLE__
-
-typedef struct {
- u_int32_t length;
- attrreference_t name;
- fsobj_type_t obj_type;
- struct timespec mtime;
-#if __LITTLE_ENDIAN__
- mode_t access_mask;
- uint16_t padding;
-#else
- uint16_t padding;
- mode_t access_mask;
-#endif
- off_t size;
-} __attribute__((packed)) attrbuf_entry;
-
-int attrkind(attrbuf_entry *entry)
-{
- switch (entry->obj_type) {
- case VREG: return S_IFREG;
- case VDIR: return S_IFDIR;
- case VLNK: return S_IFLNK;
- case VBLK: return S_IFBLK;
- case VCHR: return S_IFCHR;
- case VFIFO: return S_IFIFO;
- case VSOCK: return S_IFSOCK;
- }
- return -1;
-}
-
-/* get these many entries at a time */
-#define LISTDIR_BATCH_SIZE 50
-
-static PyObject *_listdir_batch(char *path, int pathlen, int keepstat,
- char *skip, bool *fallback)
-{
- PyObject *list, *elem, *stat = NULL, *ret = NULL;
- int kind, err;
- unsigned long index;
- unsigned int count, old_state, new_state;
- bool state_seen = false;
- attrbuf_entry *entry;
- /* from the getattrlist(2) man page: a path can be no longer than
- (NAME_MAX * 3 + 1) bytes. Also, "The getattrlist() function will
- silently truncate attribute data if attrBufSize is too small." So
- pass in a buffer big enough for the worst case. */
- char attrbuf[LISTDIR_BATCH_SIZE * (sizeof(attrbuf_entry) + NAME_MAX * 3 + 1)];
- unsigned int basep_unused;
-
- struct stat st;
- int dfd = -1;
-
- /* these must match the attrbuf_entry struct, otherwise you'll end up
- with garbage */
- struct attrlist requested_attr = {0};
- requested_attr.bitmapcount = ATTR_BIT_MAP_COUNT;
- requested_attr.commonattr = (ATTR_CMN_NAME | ATTR_CMN_OBJTYPE |
- ATTR_CMN_MODTIME | ATTR_CMN_ACCESSMASK);
- requested_attr.fileattr = ATTR_FILE_DATALENGTH;
-
- *fallback = false;
-
- if (pathlen >= PATH_MAX) {
- errno = ENAMETOOLONG;
- PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
- goto error_value;
- }
-
- dfd = open(path, O_RDONLY);
- if (dfd == -1) {
- PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
- goto error_value;
- }
-
- list = PyList_New(0);
- if (!list)
- goto error_dir;
-
- do {
- count = LISTDIR_BATCH_SIZE;
- err = getdirentriesattr(dfd, &requested_attr, &attrbuf,
- sizeof(attrbuf), &count, &basep_unused,
- &new_state, 0);
- if (err < 0) {
- if (errno == ENOTSUP) {
- /* We're on a filesystem that doesn't support
- getdirentriesattr. Fall back to the
- stat-based implementation. */
- *fallback = true;
- } else
- PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
- goto error;
- }
-
- if (!state_seen) {
- old_state = new_state;
- state_seen = true;
- } else if (old_state != new_state) {
- /* There's an edge case with getdirentriesattr. Consider
- the following initial list of files:
-
- a
- b
- <--
- c
- d
-
- If the iteration is paused at the arrow, and b is
- deleted before it is resumed, getdirentriesattr will
- not return d at all! Ordinarily we're expected to
- restart the iteration from the beginning. To avoid
- getting stuck in a retry loop here, fall back to
- stat. */
- *fallback = true;
- goto error;
- }
-
- entry = (attrbuf_entry *)attrbuf;
-
- for (index = 0; index < count; index++) {
- char *filename = ((char *)&entry->name) +
- entry->name.attr_dataoffset;
-
- if (!strcmp(filename, ".") || !strcmp(filename, ".."))
- continue;
-
- kind = attrkind(entry);
- if (kind == -1) {
- PyErr_Format(PyExc_OSError,
- "unknown object type %u for file "
- "%s%s!",
- entry->obj_type, path, filename);
- goto error;
- }
-
- /* quit early? */
- if (skip && kind == S_IFDIR && !strcmp(filename, skip)) {
- ret = PyList_New(0);
- goto error;
- }
-
- if (keepstat) {
- /* from the getattrlist(2) man page: "Only the
- permission bits ... are valid". */
- st.st_mode = (entry->access_mask & ~S_IFMT) | kind;
- st.st_mtime = entry->mtime.tv_sec;
- st.st_size = entry->size;
- stat = makestat(&st);
- if (!stat)
- goto error;
- elem = Py_BuildValue("siN", filename, kind, stat);
- } else
- elem = Py_BuildValue("si", filename, kind);
- if (!elem)
- goto error;
- stat = NULL;
-
- PyList_Append(list, elem);
- Py_DECREF(elem);
-
- entry = (attrbuf_entry *)((char *)entry + entry->length);
- }
- } while (err == 0);
-
- ret = list;
- Py_INCREF(ret);
-
-error:
- Py_DECREF(list);
- Py_XDECREF(stat);
-error_dir:
- close(dfd);
-error_value:
- return ret;
-}
-
-#endif /* __APPLE__ */
-
-static PyObject *_listdir(char *path, int pathlen, int keepstat, char *skip)
-{
-#ifdef __APPLE__
- PyObject *ret;
- bool fallback = false;
-
- ret = _listdir_batch(path, pathlen, keepstat, skip, &fallback);
- if (ret != NULL || !fallback)
- return ret;
-#endif
- return _listdir_stat(path, pathlen, keepstat, skip);
-}
-
-static PyObject *statfiles(PyObject *self, PyObject *args)
-{
- PyObject *names, *stats;
- Py_ssize_t i, count;
-
- if (!PyArg_ParseTuple(args, "O:statfiles", &names))
- return NULL;
-
- count = PySequence_Length(names);
- if (count == -1) {
- PyErr_SetString(PyExc_TypeError, "not a sequence");
- return NULL;
- }
-
- stats = PyList_New(count);
- if (stats == NULL)
- return NULL;
-
- for (i = 0; i < count; i++) {
- PyObject *stat, *pypath;
- struct stat st;
- int ret, kind;
- char *path;
-
- /* With a large file count or on a slow filesystem,
- don't block signals for long (issue4878). */
- if ((i % 1000) == 999 && PyErr_CheckSignals() == -1)
- goto bail;
-
- pypath = PySequence_GetItem(names, i);
- if (!pypath)
- goto bail;
- path = PyBytes_AsString(pypath);
- if (path == NULL) {
- Py_DECREF(pypath);
- PyErr_SetString(PyExc_TypeError, "not a string");
- goto bail;
- }
- ret = lstat(path, &st);
- Py_DECREF(pypath);
- kind = st.st_mode & S_IFMT;
- if (ret != -1 && (kind == S_IFREG || kind == S_IFLNK)) {
- stat = makestat(&st);
- if (stat == NULL)
- goto bail;
- PyList_SET_ITEM(stats, i, stat);
- } else {
- Py_INCREF(Py_None);
- PyList_SET_ITEM(stats, i, Py_None);
- }
- }
-
- return stats;
-
-bail:
- Py_DECREF(stats);
- return NULL;
-}
-
-/*
- * recvfds() simply does not release GIL during blocking io operation because
- * command server is known to be single-threaded.
- *
- * Old systems such as Solaris don't provide CMSG_LEN, msg_control, etc.
- * Currently, recvfds() is not supported on these platforms.
- */
-#ifdef CMSG_LEN
-
-static ssize_t recvfdstobuf(int sockfd, int **rfds, void *cbuf, size_t cbufsize)
-{
- char dummy[1];
- struct iovec iov = {dummy, sizeof(dummy)};
- struct msghdr msgh = {0};
- struct cmsghdr *cmsg;
-
- msgh.msg_iov = &iov;
- msgh.msg_iovlen = 1;
- msgh.msg_control = cbuf;
- msgh.msg_controllen = (socklen_t)cbufsize;
- if (recvmsg(sockfd, &msgh, 0) < 0)
- return -1;
-
- for (cmsg = CMSG_FIRSTHDR(&msgh); cmsg;
- cmsg = CMSG_NXTHDR(&msgh, cmsg)) {
- if (cmsg->cmsg_level != SOL_SOCKET ||
- cmsg->cmsg_type != SCM_RIGHTS)
- continue;
- *rfds = (int *)CMSG_DATA(cmsg);
- return (cmsg->cmsg_len - CMSG_LEN(0)) / sizeof(int);
- }
-
- *rfds = cbuf;
- return 0;
-}
-
-static PyObject *recvfds(PyObject *self, PyObject *args)
-{
- int sockfd;
- int *rfds = NULL;
- ssize_t rfdscount, i;
- char cbuf[256];
- PyObject *rfdslist = NULL;
-
- if (!PyArg_ParseTuple(args, "i", &sockfd))
- return NULL;
-
- rfdscount = recvfdstobuf(sockfd, &rfds, cbuf, sizeof(cbuf));
- if (rfdscount < 0)
- return PyErr_SetFromErrno(PyExc_OSError);
-
- rfdslist = PyList_New(rfdscount);
- if (!rfdslist)
- goto bail;
- for (i = 0; i < rfdscount; i++) {
- PyObject *obj = PyLong_FromLong(rfds[i]);
- if (!obj)
- goto bail;
- PyList_SET_ITEM(rfdslist, i, obj);
- }
- return rfdslist;
-
-bail:
- Py_XDECREF(rfdslist);
- return NULL;
-}
-
-#endif /* CMSG_LEN */
-
-#if defined(HAVE_SETPROCTITLE)
-/* setproctitle is the first choice - available in FreeBSD */
-#define SETPROCNAME_USE_SETPROCTITLE
-#elif (defined(__linux__) || defined(__APPLE__)) && PY_MAJOR_VERSION == 2
-/* rewrite the argv buffer in place - works in Linux and OS X. Py_GetArgcArgv
- * in Python 3 returns the copied wchar_t **argv, thus unsupported. */
-#define SETPROCNAME_USE_ARGVREWRITE
-#else
-#define SETPROCNAME_USE_NONE
-#endif
-
-#ifndef SETPROCNAME_USE_NONE
-static PyObject *setprocname(PyObject *self, PyObject *args)
-{
- const char *name = NULL;
- if (!PyArg_ParseTuple(args, "s", &name))
- return NULL;
-
-#if defined(SETPROCNAME_USE_SETPROCTITLE)
- setproctitle("%s", name);
-#elif defined(SETPROCNAME_USE_ARGVREWRITE)
- {
- static char *argvstart = NULL;
- static size_t argvsize = 0;
- if (argvstart == NULL) {
- int argc = 0, i;
- char **argv = NULL;
- char *argvend;
- extern void Py_GetArgcArgv(int *argc, char ***argv);
- Py_GetArgcArgv(&argc, &argv);
-
- /* Check the memory we can use. Typically, argv[i] and
- * argv[i + 1] are continuous. */
- argvend = argvstart = argv[0];
- for (i = 0; i < argc; ++i) {
- if (argv[i] > argvend || argv[i] < argvstart)
- break; /* not continuous */
- size_t len = strlen(argv[i]);
- argvend = argv[i] + len + 1 /* '\0' */;
- }
- if (argvend > argvstart) /* sanity check */
- argvsize = argvend - argvstart;
- }
-
- if (argvstart && argvsize > 1) {
- int n = snprintf(argvstart, argvsize, "%s", name);
- if (n >= 0 && (size_t)n < argvsize)
- memset(argvstart + n, 0, argvsize - n);
- }
- }
-#endif
-
- Py_RETURN_NONE;
-}
-#endif /* ndef SETPROCNAME_USE_NONE */
-
-#if defined(HAVE_BSD_STATFS)
-static const char *describefstype(const struct statfs *pbuf)
-{
- /* BSD or OSX provides a f_fstypename field */
- return pbuf->f_fstypename;
-}
-#elif defined(HAVE_LINUX_STATFS)
-static const char *describefstype(const struct statfs *pbuf)
-{
- /* Begin of Linux filesystems */
-#ifdef ADFS_SUPER_MAGIC
- if (pbuf->f_type == ADFS_SUPER_MAGIC)
- return "adfs";
-#endif
-#ifdef AFFS_SUPER_MAGIC
- if (pbuf->f_type == AFFS_SUPER_MAGIC)
- return "affs";
-#endif
-#ifdef AUTOFS_SUPER_MAGIC
- if (pbuf->f_type == AUTOFS_SUPER_MAGIC)
- return "autofs";
-#endif
-#ifdef BDEVFS_MAGIC
- if (pbuf->f_type == BDEVFS_MAGIC)
- return "bdevfs";
-#endif
-#ifdef BEFS_SUPER_MAGIC
- if (pbuf->f_type == BEFS_SUPER_MAGIC)
- return "befs";
-#endif
-#ifdef BFS_MAGIC
- if (pbuf->f_type == BFS_MAGIC)
- return "bfs";
-#endif
-#ifdef BINFMTFS_MAGIC
- if (pbuf->f_type == BINFMTFS_MAGIC)
- return "binfmtfs";
-#endif
-#ifdef BTRFS_SUPER_MAGIC
- if (pbuf->f_type == BTRFS_SUPER_MAGIC)
- return "btrfs";
-#endif
-#ifdef CGROUP_SUPER_MAGIC
- if (pbuf->f_type == CGROUP_SUPER_MAGIC)
- return "cgroup";
-#endif
-#ifdef CIFS_MAGIC_NUMBER
- if (pbuf->f_type == CIFS_MAGIC_NUMBER)
- return "cifs";
-#endif
-#ifdef CODA_SUPER_MAGIC
- if (pbuf->f_type == CODA_SUPER_MAGIC)
- return "coda";
-#endif
-#ifdef COH_SUPER_MAGIC
- if (pbuf->f_type == COH_SUPER_MAGIC)
- return "coh";
-#endif
-#ifdef CRAMFS_MAGIC
- if (pbuf->f_type == CRAMFS_MAGIC)
- return "cramfs";
-#endif
-#ifdef DEBUGFS_MAGIC
- if (pbuf->f_type == DEBUGFS_MAGIC)
- return "debugfs";
-#endif
-#ifdef DEVFS_SUPER_MAGIC
- if (pbuf->f_type == DEVFS_SUPER_MAGIC)
- return "devfs";
-#endif
-#ifdef DEVPTS_SUPER_MAGIC
- if (pbuf->f_type == DEVPTS_SUPER_MAGIC)
- return "devpts";
-#endif
-#ifdef EFIVARFS_MAGIC
- if (pbuf->f_type == EFIVARFS_MAGIC)
- return "efivarfs";
-#endif
-#ifdef EFS_SUPER_MAGIC
- if (pbuf->f_type == EFS_SUPER_MAGIC)
- return "efs";
-#endif
-#ifdef EXT_SUPER_MAGIC
- if (pbuf->f_type == EXT_SUPER_MAGIC)
- return "ext";
-#endif
-#ifdef EXT2_OLD_SUPER_MAGIC
- if (pbuf->f_type == EXT2_OLD_SUPER_MAGIC)
- return "ext2";
-#endif
-#ifdef EXT2_SUPER_MAGIC
- if (pbuf->f_type == EXT2_SUPER_MAGIC)
- return "ext2";
-#endif
-#ifdef EXT3_SUPER_MAGIC
- if (pbuf->f_type == EXT3_SUPER_MAGIC)
- return "ext3";
-#endif
-#ifdef EXT4_SUPER_MAGIC
- if (pbuf->f_type == EXT4_SUPER_MAGIC)
- return "ext4";
-#endif
-#ifdef F2FS_SUPER_MAGIC
- if (pbuf->f_type == F2FS_SUPER_MAGIC)
- return "f2fs";
-#endif
-#ifdef FUSE_SUPER_MAGIC
- if (pbuf->f_type == FUSE_SUPER_MAGIC)
- return "fuse";
-#endif
-#ifdef FUTEXFS_SUPER_MAGIC
- if (pbuf->f_type == FUTEXFS_SUPER_MAGIC)
- return "futexfs";
-#endif
-#ifdef HFS_SUPER_MAGIC
- if (pbuf->f_type == HFS_SUPER_MAGIC)
- return "hfs";
-#endif
-#ifdef HOSTFS_SUPER_MAGIC
- if (pbuf->f_type == HOSTFS_SUPER_MAGIC)
- return "hostfs";
-#endif
-#ifdef HPFS_SUPER_MAGIC
- if (pbuf->f_type == HPFS_SUPER_MAGIC)
- return "hpfs";
-#endif
-#ifdef HUGETLBFS_MAGIC
- if (pbuf->f_type == HUGETLBFS_MAGIC)
- return "hugetlbfs";
-#endif
-#ifdef ISOFS_SUPER_MAGIC
- if (pbuf->f_type == ISOFS_SUPER_MAGIC)
- return "isofs";
-#endif
-#ifdef JFFS2_SUPER_MAGIC
- if (pbuf->f_type == JFFS2_SUPER_MAGIC)
- return "jffs2";
-#endif
-#ifdef JFS_SUPER_MAGIC
- if (pbuf->f_type == JFS_SUPER_MAGIC)
- return "jfs";
-#endif
-#ifdef MINIX_SUPER_MAGIC
- if (pbuf->f_type == MINIX_SUPER_MAGIC)
- return "minix";
-#endif
-#ifdef MINIX2_SUPER_MAGIC
- if (pbuf->f_type == MINIX2_SUPER_MAGIC)
- return "minix2";
-#endif
-#ifdef MINIX3_SUPER_MAGIC
- if (pbuf->f_type == MINIX3_SUPER_MAGIC)
- return "minix3";
-#endif
-#ifdef MQUEUE_MAGIC
- if (pbuf->f_type == MQUEUE_MAGIC)
- return "mqueue";
-#endif
-#ifdef MSDOS_SUPER_MAGIC
- if (pbuf->f_type == MSDOS_SUPER_MAGIC)
- return "msdos";
-#endif
-#ifdef NCP_SUPER_MAGIC
- if (pbuf->f_type == NCP_SUPER_MAGIC)
- return "ncp";
-#endif
-#ifdef NFS_SUPER_MAGIC
- if (pbuf->f_type == NFS_SUPER_MAGIC)
- return "nfs";
-#endif
-#ifdef NILFS_SUPER_MAGIC
- if (pbuf->f_type == NILFS_SUPER_MAGIC)
- return "nilfs";
-#endif
-#ifdef NTFS_SB_MAGIC
- if (pbuf->f_type == NTFS_SB_MAGIC)
- return "ntfs-sb";
-#endif
-#ifdef OCFS2_SUPER_MAGIC
- if (pbuf->f_type == OCFS2_SUPER_MAGIC)
- return "ocfs2";
-#endif
-#ifdef OPENPROM_SUPER_MAGIC
- if (pbuf->f_type == OPENPROM_SUPER_MAGIC)
- return "openprom";
-#endif
-#ifdef OVERLAYFS_SUPER_MAGIC
- if (pbuf->f_type == OVERLAYFS_SUPER_MAGIC)
- return "overlay";
-#endif
-#ifdef PIPEFS_MAGIC
- if (pbuf->f_type == PIPEFS_MAGIC)
- return "pipefs";
-#endif
-#ifdef PROC_SUPER_MAGIC
- if (pbuf->f_type == PROC_SUPER_MAGIC)
- return "proc";
-#endif
-#ifdef PSTOREFS_MAGIC
- if (pbuf->f_type == PSTOREFS_MAGIC)
- return "pstorefs";
-#endif
-#ifdef QNX4_SUPER_MAGIC
- if (pbuf->f_type == QNX4_SUPER_MAGIC)
- return "qnx4";
-#endif
-#ifdef QNX6_SUPER_MAGIC
- if (pbuf->f_type == QNX6_SUPER_MAGIC)
- return "qnx6";
-#endif
-#ifdef RAMFS_MAGIC
- if (pbuf->f_type == RAMFS_MAGIC)
- return "ramfs";
-#endif
-#ifdef REISERFS_SUPER_MAGIC
- if (pbuf->f_type == REISERFS_SUPER_MAGIC)
- return "reiserfs";
-#endif
-#ifdef ROMFS_MAGIC
- if (pbuf->f_type == ROMFS_MAGIC)
- return "romfs";
-#endif
-#ifdef SECURITYFS_MAGIC
- if (pbuf->f_type == SECURITYFS_MAGIC)
- return "securityfs";
-#endif
-#ifdef SELINUX_MAGIC
- if (pbuf->f_type == SELINUX_MAGIC)
- return "selinux";
-#endif
-#ifdef SMACK_MAGIC
- if (pbuf->f_type == SMACK_MAGIC)
- return "smack";
-#endif
-#ifdef SMB_SUPER_MAGIC
- if (pbuf->f_type == SMB_SUPER_MAGIC)
- return "smb";
-#endif
-#ifdef SOCKFS_MAGIC
- if (pbuf->f_type == SOCKFS_MAGIC)
- return "sockfs";
-#endif
-#ifdef SQUASHFS_MAGIC
- if (pbuf->f_type == SQUASHFS_MAGIC)
- return "squashfs";
-#endif
-#ifdef SYSFS_MAGIC
- if (pbuf->f_type == SYSFS_MAGIC)
- return "sysfs";
-#endif
-#ifdef SYSV2_SUPER_MAGIC
- if (pbuf->f_type == SYSV2_SUPER_MAGIC)
- return "sysv2";
-#endif
-#ifdef SYSV4_SUPER_MAGIC
- if (pbuf->f_type == SYSV4_SUPER_MAGIC)
- return "sysv4";
-#endif
-#ifdef TMPFS_MAGIC
- if (pbuf->f_type == TMPFS_MAGIC)
- return "tmpfs";
-#endif
-#ifdef UDF_SUPER_MAGIC
- if (pbuf->f_type == UDF_SUPER_MAGIC)
- return "udf";
-#endif
-#ifdef UFS_MAGIC
- if (pbuf->f_type == UFS_MAGIC)
- return "ufs";
-#endif
-#ifdef USBDEVICE_SUPER_MAGIC
- if (pbuf->f_type == USBDEVICE_SUPER_MAGIC)
- return "usbdevice";
-#endif
-#ifdef V9FS_MAGIC
- if (pbuf->f_type == V9FS_MAGIC)
- return "v9fs";
-#endif
-#ifdef VXFS_SUPER_MAGIC
- if (pbuf->f_type == VXFS_SUPER_MAGIC)
- return "vxfs";
-#endif
-#ifdef XENFS_SUPER_MAGIC
- if (pbuf->f_type == XENFS_SUPER_MAGIC)
- return "xenfs";
-#endif
-#ifdef XENIX_SUPER_MAGIC
- if (pbuf->f_type == XENIX_SUPER_MAGIC)
- return "xenix";
-#endif
-#ifdef XFS_SUPER_MAGIC
- if (pbuf->f_type == XFS_SUPER_MAGIC)
- return "xfs";
-#endif
- /* End of Linux filesystems */
- return NULL;
-}
-#endif /* def HAVE_LINUX_STATFS */
-
-#if defined(HAVE_BSD_STATFS) || defined(HAVE_LINUX_STATFS)
-/* given a directory path, return filesystem type name (best-effort) */
-static PyObject *getfstype(PyObject *self, PyObject *args)
-{
- const char *path = NULL;
- struct statfs buf;
- int r;
- if (!PyArg_ParseTuple(args, "s", &path))
- return NULL;
-
- memset(&buf, 0, sizeof(buf));
- r = statfs(path, &buf);
- if (r != 0)
- return PyErr_SetFromErrno(PyExc_OSError);
- return Py_BuildValue("s", describefstype(&buf));
-}
-#endif /* defined(HAVE_LINUX_STATFS) || defined(HAVE_BSD_STATFS) */
-
-#endif /* ndef _WIN32 */
-
-static PyObject *listdir(PyObject *self, PyObject *args, PyObject *kwargs)
-{
- PyObject *statobj = NULL; /* initialize - optional arg */
- PyObject *skipobj = NULL; /* initialize - optional arg */
- char *path, *skip = NULL;
- int wantstat, plen;
-
- static char *kwlist[] = {"path", "stat", "skip", NULL};
-
- if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s#|OO:listdir",
- kwlist, &path, &plen, &statobj, &skipobj))
- return NULL;
-
- wantstat = statobj && PyObject_IsTrue(statobj);
-
- if (skipobj && skipobj != Py_None) {
- skip = PyBytes_AsString(skipobj);
- if (!skip)
- return NULL;
- }
-
- return _listdir(path, plen, wantstat, skip);
-}
-
-#ifdef _WIN32
-static PyObject *posixfile(PyObject *self, PyObject *args, PyObject *kwds)
-{
- static char *kwlist[] = {"name", "mode", "buffering", NULL};
- PyObject *file_obj = NULL;
- char *name = NULL;
- char *mode = "rb";
- DWORD access = 0;
- DWORD creation;
- HANDLE handle;
- int fd, flags = 0;
- int bufsize = -1;
- char m0, m1, m2;
- char fpmode[4];
- int fppos = 0;
- int plus;
- FILE *fp;
-
- if (!PyArg_ParseTupleAndKeywords(args, kwds, "et|si:posixfile", kwlist,
- Py_FileSystemDefaultEncoding,
- &name, &mode, &bufsize))
- return NULL;
-
- m0 = mode[0];
- m1 = m0 ? mode[1] : '\0';
- m2 = m1 ? mode[2] : '\0';
- plus = m1 == '+' || m2 == '+';
-
- fpmode[fppos++] = m0;
- if (m1 == 'b' || m2 == 'b') {
- flags = _O_BINARY;
- fpmode[fppos++] = 'b';
- }
- else
- flags = _O_TEXT;
- if (m0 == 'r' && !plus) {
- flags |= _O_RDONLY;
- access = GENERIC_READ;
- } else {
- /*
- work around http://support.microsoft.com/kb/899149 and
- set _O_RDWR for 'w' and 'a', even if mode has no '+'
- */
- flags |= _O_RDWR;
- access = GENERIC_READ | GENERIC_WRITE;
- fpmode[fppos++] = '+';
- }
- fpmode[fppos++] = '\0';
-
- switch (m0) {
- case 'r':
- creation = OPEN_EXISTING;
- break;
- case 'w':
- creation = CREATE_ALWAYS;
- break;
- case 'a':
- creation = OPEN_ALWAYS;
- flags |= _O_APPEND;
- break;
- default:
- PyErr_Format(PyExc_ValueError,
- "mode string must begin with one of 'r', 'w', "
- "or 'a', not '%c'", m0);
- goto bail;
- }
-
- handle = CreateFile(name, access,
- FILE_SHARE_READ | FILE_SHARE_WRITE |
- FILE_SHARE_DELETE,
- NULL,
- creation,
- FILE_ATTRIBUTE_NORMAL,
- 0);
-
- if (handle == INVALID_HANDLE_VALUE) {
- PyErr_SetFromWindowsErrWithFilename(GetLastError(), name);
- goto bail;
- }
-
- fd = _open_osfhandle((intptr_t)handle, flags);
-
- if (fd == -1) {
- CloseHandle(handle);
- PyErr_SetFromErrnoWithFilename(PyExc_IOError, name);
- goto bail;
- }
-#ifndef IS_PY3K
- fp = _fdopen(fd, fpmode);
- if (fp == NULL) {
- _close(fd);
- PyErr_SetFromErrnoWithFilename(PyExc_IOError, name);
- goto bail;
- }
-
- file_obj = PyFile_FromFile(fp, name, mode, fclose);
- if (file_obj == NULL) {
- fclose(fp);
- goto bail;
- }
-
- PyFile_SetBufSize(file_obj, bufsize);
-#else
- file_obj = PyFile_FromFd(fd, name, mode, bufsize, NULL, NULL, NULL, 1);
- if (file_obj == NULL)
- goto bail;
-#endif
-bail:
- PyMem_Free(name);
- return file_obj;
-}
-#endif
-
-#ifdef __APPLE__
-#include <ApplicationServices/ApplicationServices.h>
-
-static PyObject *isgui(PyObject *self)
-{
- CFDictionaryRef dict = CGSessionCopyCurrentDictionary();
-
- if (dict != NULL) {
- CFRelease(dict);
- Py_RETURN_TRUE;
- } else {
- Py_RETURN_FALSE;
- }
-}
-#endif
-
-static char osutil_doc[] = "Native operating system services.";
-
-static PyMethodDef methods[] = {
- {"listdir", (PyCFunction)listdir, METH_VARARGS | METH_KEYWORDS,
- "list a directory\n"},
-#ifdef _WIN32
- {"posixfile", (PyCFunction)posixfile, METH_VARARGS | METH_KEYWORDS,
- "Open a file with POSIX-like semantics.\n"
-"On error, this function may raise either a WindowsError or an IOError."},
-#else
- {"statfiles", (PyCFunction)statfiles, METH_VARARGS | METH_KEYWORDS,
- "stat a series of files or symlinks\n"
-"Returns None for non-existent entries and entries of other types.\n"},
-#ifdef CMSG_LEN
- {"recvfds", (PyCFunction)recvfds, METH_VARARGS,
- "receive list of file descriptors via socket\n"},
-#endif
-#ifndef SETPROCNAME_USE_NONE
- {"setprocname", (PyCFunction)setprocname, METH_VARARGS,
- "set process title (best-effort)\n"},
-#endif
-#if defined(HAVE_BSD_STATFS) || defined(HAVE_LINUX_STATFS)
- {"getfstype", (PyCFunction)getfstype, METH_VARARGS,
- "get filesystem type (best-effort)\n"},
-#endif
-#endif /* ndef _WIN32 */
-#ifdef __APPLE__
- {
- "isgui", (PyCFunction)isgui, METH_NOARGS,
- "Is a CoreGraphics session available?"
- },
-#endif
- {NULL, NULL}
-};
-
-#ifdef IS_PY3K
-static struct PyModuleDef osutil_module = {
- PyModuleDef_HEAD_INIT,
- "osutil",
- osutil_doc,
- -1,
- methods
-};
-
-PyMODINIT_FUNC PyInit_osutil(void)
-{
- if (PyType_Ready(&listdir_stat_type) < 0)
- return NULL;
-
- return PyModule_Create(&osutil_module);
-}
-#else
-PyMODINIT_FUNC initosutil(void)
-{
- if (PyType_Ready(&listdir_stat_type) == -1)
- return;
-
- Py_InitModule3("osutil", methods, osutil_doc);
-}
-#endif
--- a/mercurial/parsers.c Tue Jun 13 22:24:41 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,2943 +0,0 @@
-/*
- parsers.c - efficient content parsing
-
- Copyright 2008 Matt Mackall <mpm@selenic.com> and others
-
- This software may be used and distributed according to the terms of
- the GNU General Public License, incorporated herein by reference.
-*/
-
-#include <Python.h>
-#include <ctype.h>
-#include <stddef.h>
-#include <string.h>
-
-#include "util.h"
-#include "bitmanipulation.h"
-
-#ifdef IS_PY3K
-/* The mapping of Python types is meant to be temporary to get Python
- * 3 to compile. We should remove this once Python 3 support is fully
- * supported and proper types are used in the extensions themselves. */
-#define PyInt_Type PyLong_Type
-#define PyInt_Check PyLong_Check
-#define PyInt_FromLong PyLong_FromLong
-#define PyInt_FromSsize_t PyLong_FromSsize_t
-#define PyInt_AS_LONG PyLong_AS_LONG
-#define PyInt_AsLong PyLong_AsLong
-#endif
-
-static char *versionerrortext = "Python minor version mismatch";
-
-static int8_t hextable[256] = {
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1, /* 0-9 */
- -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, /* A-F */
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, /* a-f */
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1
-};
-
-static char lowertable[128] = {
- '\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07',
- '\x08', '\x09', '\x0a', '\x0b', '\x0c', '\x0d', '\x0e', '\x0f',
- '\x10', '\x11', '\x12', '\x13', '\x14', '\x15', '\x16', '\x17',
- '\x18', '\x19', '\x1a', '\x1b', '\x1c', '\x1d', '\x1e', '\x1f',
- '\x20', '\x21', '\x22', '\x23', '\x24', '\x25', '\x26', '\x27',
- '\x28', '\x29', '\x2a', '\x2b', '\x2c', '\x2d', '\x2e', '\x2f',
- '\x30', '\x31', '\x32', '\x33', '\x34', '\x35', '\x36', '\x37',
- '\x38', '\x39', '\x3a', '\x3b', '\x3c', '\x3d', '\x3e', '\x3f',
- '\x40',
- '\x61', '\x62', '\x63', '\x64', '\x65', '\x66', '\x67', /* A-G */
- '\x68', '\x69', '\x6a', '\x6b', '\x6c', '\x6d', '\x6e', '\x6f', /* H-O */
- '\x70', '\x71', '\x72', '\x73', '\x74', '\x75', '\x76', '\x77', /* P-W */
- '\x78', '\x79', '\x7a', /* X-Z */
- '\x5b', '\x5c', '\x5d', '\x5e', '\x5f',
- '\x60', '\x61', '\x62', '\x63', '\x64', '\x65', '\x66', '\x67',
- '\x68', '\x69', '\x6a', '\x6b', '\x6c', '\x6d', '\x6e', '\x6f',
- '\x70', '\x71', '\x72', '\x73', '\x74', '\x75', '\x76', '\x77',
- '\x78', '\x79', '\x7a', '\x7b', '\x7c', '\x7d', '\x7e', '\x7f'
-};
-
-static char uppertable[128] = {
- '\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07',
- '\x08', '\x09', '\x0a', '\x0b', '\x0c', '\x0d', '\x0e', '\x0f',
- '\x10', '\x11', '\x12', '\x13', '\x14', '\x15', '\x16', '\x17',
- '\x18', '\x19', '\x1a', '\x1b', '\x1c', '\x1d', '\x1e', '\x1f',
- '\x20', '\x21', '\x22', '\x23', '\x24', '\x25', '\x26', '\x27',
- '\x28', '\x29', '\x2a', '\x2b', '\x2c', '\x2d', '\x2e', '\x2f',
- '\x30', '\x31', '\x32', '\x33', '\x34', '\x35', '\x36', '\x37',
- '\x38', '\x39', '\x3a', '\x3b', '\x3c', '\x3d', '\x3e', '\x3f',
- '\x40', '\x41', '\x42', '\x43', '\x44', '\x45', '\x46', '\x47',
- '\x48', '\x49', '\x4a', '\x4b', '\x4c', '\x4d', '\x4e', '\x4f',
- '\x50', '\x51', '\x52', '\x53', '\x54', '\x55', '\x56', '\x57',
- '\x58', '\x59', '\x5a', '\x5b', '\x5c', '\x5d', '\x5e', '\x5f',
- '\x60',
- '\x41', '\x42', '\x43', '\x44', '\x45', '\x46', '\x47', /* a-g */
- '\x48', '\x49', '\x4a', '\x4b', '\x4c', '\x4d', '\x4e', '\x4f', /* h-o */
- '\x50', '\x51', '\x52', '\x53', '\x54', '\x55', '\x56', '\x57', /* p-w */
- '\x58', '\x59', '\x5a', /* x-z */
- '\x7b', '\x7c', '\x7d', '\x7e', '\x7f'
-};
-
-static inline int hexdigit(const char *p, Py_ssize_t off)
-{
- int8_t val = hextable[(unsigned char)p[off]];
-
- if (val >= 0) {
- return val;
- }
-
- PyErr_SetString(PyExc_ValueError, "input contains non-hex character");
- return 0;
-}
-
-/*
- * Turn a hex-encoded string into binary.
- */
-PyObject *unhexlify(const char *str, int len)
-{
- PyObject *ret;
- char *d;
- int i;
-
- ret = PyBytes_FromStringAndSize(NULL, len / 2);
-
- if (!ret)
- return NULL;
-
- d = PyBytes_AsString(ret);
-
- for (i = 0; i < len;) {
- int hi = hexdigit(str, i++);
- int lo = hexdigit(str, i++);
- *d++ = (hi << 4) | lo;
- }
-
- return ret;
-}
-
-static inline PyObject *_asciitransform(PyObject *str_obj,
- const char table[128],
- PyObject *fallback_fn)
-{
- char *str, *newstr;
- Py_ssize_t i, len;
- PyObject *newobj = NULL;
- PyObject *ret = NULL;
-
- str = PyBytes_AS_STRING(str_obj);
- len = PyBytes_GET_SIZE(str_obj);
-
- newobj = PyBytes_FromStringAndSize(NULL, len);
- if (!newobj)
- goto quit;
-
- newstr = PyBytes_AS_STRING(newobj);
-
- for (i = 0; i < len; i++) {
- char c = str[i];
- if (c & 0x80) {
- if (fallback_fn != NULL) {
- ret = PyObject_CallFunctionObjArgs(fallback_fn,
- str_obj, NULL);
- } else {
- PyObject *err = PyUnicodeDecodeError_Create(
- "ascii", str, len, i, (i + 1),
- "unexpected code byte");
- PyErr_SetObject(PyExc_UnicodeDecodeError, err);
- Py_XDECREF(err);
- }
- goto quit;
- }
- newstr[i] = table[(unsigned char)c];
- }
-
- ret = newobj;
- Py_INCREF(ret);
-quit:
- Py_XDECREF(newobj);
- return ret;
-}
-
-static PyObject *asciilower(PyObject *self, PyObject *args)
-{
- PyObject *str_obj;
- if (!PyArg_ParseTuple(args, "O!:asciilower", &PyBytes_Type, &str_obj))
- return NULL;
- return _asciitransform(str_obj, lowertable, NULL);
-}
-
-static PyObject *asciiupper(PyObject *self, PyObject *args)
-{
- PyObject *str_obj;
- if (!PyArg_ParseTuple(args, "O!:asciiupper", &PyBytes_Type, &str_obj))
- return NULL;
- return _asciitransform(str_obj, uppertable, NULL);
-}
-
-static inline PyObject *_dict_new_presized(Py_ssize_t expected_size)
-{
- /* _PyDict_NewPresized expects a minused parameter, but it actually
- creates a dictionary that's the nearest power of two bigger than the
- parameter. For example, with the initial minused = 1000, the
- dictionary created has size 1024. Of course in a lot of cases that
- can be greater than the maximum load factor Python's dict object
- expects (= 2/3), so as soon as we cross the threshold we'll resize
- anyway. So create a dictionary that's at least 3/2 the size. */
- return _PyDict_NewPresized(((1 + expected_size) / 2) * 3);
-}
-
-static PyObject *dict_new_presized(PyObject *self, PyObject *args)
-{
- Py_ssize_t expected_size;
-
- if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size))
- return NULL;
-
- return _dict_new_presized(expected_size);
-}
-
-static PyObject *make_file_foldmap(PyObject *self, PyObject *args)
-{
- PyObject *dmap, *spec_obj, *normcase_fallback;
- PyObject *file_foldmap = NULL;
- enum normcase_spec spec;
- PyObject *k, *v;
- dirstateTupleObject *tuple;
- Py_ssize_t pos = 0;
- const char *table;
-
- if (!PyArg_ParseTuple(args, "O!O!O!:make_file_foldmap",
- &PyDict_Type, &dmap,
- &PyInt_Type, &spec_obj,
- &PyFunction_Type, &normcase_fallback))
- goto quit;
-
- spec = (int)PyInt_AS_LONG(spec_obj);
- switch (spec) {
- case NORMCASE_LOWER:
- table = lowertable;
- break;
- case NORMCASE_UPPER:
- table = uppertable;
- break;
- case NORMCASE_OTHER:
- table = NULL;
- break;
- default:
- PyErr_SetString(PyExc_TypeError, "invalid normcasespec");
- goto quit;
- }
-
- /* Add some more entries to deal with additions outside this
- function. */
- file_foldmap = _dict_new_presized((PyDict_Size(dmap) / 10) * 11);
- if (file_foldmap == NULL)
- goto quit;
-
- while (PyDict_Next(dmap, &pos, &k, &v)) {
- if (!dirstate_tuple_check(v)) {
- PyErr_SetString(PyExc_TypeError,
- "expected a dirstate tuple");
- goto quit;
- }
-
- tuple = (dirstateTupleObject *)v;
- if (tuple->state != 'r') {
- PyObject *normed;
- if (table != NULL) {
- normed = _asciitransform(k, table,
- normcase_fallback);
- } else {
- normed = PyObject_CallFunctionObjArgs(
- normcase_fallback, k, NULL);
- }
-
- if (normed == NULL)
- goto quit;
- if (PyDict_SetItem(file_foldmap, normed, k) == -1) {
- Py_DECREF(normed);
- goto quit;
- }
- Py_DECREF(normed);
- }
- }
- return file_foldmap;
-quit:
- Py_XDECREF(file_foldmap);
- return NULL;
-}
-
-/*
- * This code assumes that a manifest is stitched together with newline
- * ('\n') characters.
- */
-static PyObject *parse_manifest(PyObject *self, PyObject *args)
-{
- PyObject *mfdict, *fdict;
- char *str, *start, *end;
- int len;
-
- if (!PyArg_ParseTuple(args, "O!O!s#:parse_manifest",
- &PyDict_Type, &mfdict,
- &PyDict_Type, &fdict,
- &str, &len))
- goto quit;
-
- start = str;
- end = str + len;
- while (start < end) {
- PyObject *file = NULL, *node = NULL;
- PyObject *flags = NULL;
- char *zero = NULL, *newline = NULL;
- ptrdiff_t nlen;
-
- zero = memchr(start, '\0', end - start);
- if (!zero) {
- PyErr_SetString(PyExc_ValueError,
- "manifest entry has no separator");
- goto quit;
- }
-
- newline = memchr(zero + 1, '\n', end - (zero + 1));
- if (!newline) {
- PyErr_SetString(PyExc_ValueError,
- "manifest contains trailing garbage");
- goto quit;
- }
-
- file = PyBytes_FromStringAndSize(start, zero - start);
-
- if (!file)
- goto bail;
-
- nlen = newline - zero - 1;
-
- node = unhexlify(zero + 1, nlen > 40 ? 40 : (int)nlen);
- if (!node)
- goto bail;
-
- if (nlen > 40) {
- flags = PyBytes_FromStringAndSize(zero + 41,
- nlen - 40);
- if (!flags)
- goto bail;
-
- if (PyDict_SetItem(fdict, file, flags) == -1)
- goto bail;
- }
-
- if (PyDict_SetItem(mfdict, file, node) == -1)
- goto bail;
-
- start = newline + 1;
-
- Py_XDECREF(flags);
- Py_XDECREF(node);
- Py_XDECREF(file);
- continue;
- bail:
- Py_XDECREF(flags);
- Py_XDECREF(node);
- Py_XDECREF(file);
- goto quit;
- }
-
- Py_INCREF(Py_None);
- return Py_None;
-quit:
- return NULL;
-}
-
-static inline dirstateTupleObject *make_dirstate_tuple(char state, int mode,
- int size, int mtime)
-{
- dirstateTupleObject *t = PyObject_New(dirstateTupleObject,
- &dirstateTupleType);
- if (!t)
- return NULL;
- t->state = state;
- t->mode = mode;
- t->size = size;
- t->mtime = mtime;
- return t;
-}
-
-static PyObject *dirstate_tuple_new(PyTypeObject *subtype, PyObject *args,
- PyObject *kwds)
-{
- /* We do all the initialization here and not a tp_init function because
- * dirstate_tuple is immutable. */
- dirstateTupleObject *t;
- char state;
- int size, mode, mtime;
- if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime))
- return NULL;
-
- t = (dirstateTupleObject *)subtype->tp_alloc(subtype, 1);
- if (!t)
- return NULL;
- t->state = state;
- t->mode = mode;
- t->size = size;
- t->mtime = mtime;
-
- return (PyObject *)t;
-}
-
-static void dirstate_tuple_dealloc(PyObject *o)
-{
- PyObject_Del(o);
-}
-
-static Py_ssize_t dirstate_tuple_length(PyObject *o)
-{
- return 4;
-}
-
-static PyObject *dirstate_tuple_item(PyObject *o, Py_ssize_t i)
-{
- dirstateTupleObject *t = (dirstateTupleObject *)o;
- switch (i) {
- case 0:
- return PyBytes_FromStringAndSize(&t->state, 1);
- case 1:
- return PyInt_FromLong(t->mode);
- case 2:
- return PyInt_FromLong(t->size);
- case 3:
- return PyInt_FromLong(t->mtime);
- default:
- PyErr_SetString(PyExc_IndexError, "index out of range");
- return NULL;
- }
-}
-
-static PySequenceMethods dirstate_tuple_sq = {
- dirstate_tuple_length, /* sq_length */
- 0, /* sq_concat */
- 0, /* sq_repeat */
- dirstate_tuple_item, /* sq_item */
- 0, /* sq_ass_item */
- 0, /* sq_contains */
- 0, /* sq_inplace_concat */
- 0 /* sq_inplace_repeat */
-};
-
-PyTypeObject dirstateTupleType = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "dirstate_tuple", /* tp_name */
- sizeof(dirstateTupleObject),/* tp_basicsize */
- 0, /* tp_itemsize */
- (destructor)dirstate_tuple_dealloc, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_compare */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- &dirstate_tuple_sq, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT, /* tp_flags */
- "dirstate tuple", /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
- 0, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- 0, /* tp_init */
- 0, /* tp_alloc */
- dirstate_tuple_new, /* tp_new */
-};
-
-static PyObject *parse_dirstate(PyObject *self, PyObject *args)
-{
- PyObject *dmap, *cmap, *parents = NULL, *ret = NULL;
- PyObject *fname = NULL, *cname = NULL, *entry = NULL;
- char state, *cur, *str, *cpos;
- int mode, size, mtime;
- unsigned int flen, len, pos = 40;
- int readlen;
-
- if (!PyArg_ParseTuple(args, "O!O!s#:parse_dirstate",
- &PyDict_Type, &dmap,
- &PyDict_Type, &cmap,
- &str, &readlen))
- goto quit;
-
- len = readlen;
-
- /* read parents */
- if (len < 40) {
- PyErr_SetString(
- PyExc_ValueError, "too little data for parents");
- goto quit;
- }
-
- parents = Py_BuildValue("s#s#", str, 20, str + 20, 20);
- if (!parents)
- goto quit;
-
- /* read filenames */
- while (pos >= 40 && pos < len) {
- if (pos + 17 > len) {
- PyErr_SetString(PyExc_ValueError,
- "overflow in dirstate");
- goto quit;
- }
- cur = str + pos;
- /* unpack header */
- state = *cur;
- mode = getbe32(cur + 1);
- size = getbe32(cur + 5);
- mtime = getbe32(cur + 9);
- flen = getbe32(cur + 13);
- pos += 17;
- cur += 17;
- if (flen > len - pos) {
- PyErr_SetString(PyExc_ValueError, "overflow in dirstate");
- goto quit;
- }
-
- entry = (PyObject *)make_dirstate_tuple(state, mode, size,
- mtime);
- cpos = memchr(cur, 0, flen);
- if (cpos) {
- fname = PyBytes_FromStringAndSize(cur, cpos - cur);
- cname = PyBytes_FromStringAndSize(cpos + 1,
- flen - (cpos - cur) - 1);
- if (!fname || !cname ||
- PyDict_SetItem(cmap, fname, cname) == -1 ||
- PyDict_SetItem(dmap, fname, entry) == -1)
- goto quit;
- Py_DECREF(cname);
- } else {
- fname = PyBytes_FromStringAndSize(cur, flen);
- if (!fname ||
- PyDict_SetItem(dmap, fname, entry) == -1)
- goto quit;
- }
- Py_DECREF(fname);
- Py_DECREF(entry);
- fname = cname = entry = NULL;
- pos += flen;
- }
-
- ret = parents;
- Py_INCREF(ret);
-quit:
- Py_XDECREF(fname);
- Py_XDECREF(cname);
- Py_XDECREF(entry);
- Py_XDECREF(parents);
- return ret;
-}
-
-/*
- * Build a set of non-normal and other parent entries from the dirstate dmap
-*/
-static PyObject *nonnormalotherparententries(PyObject *self, PyObject *args) {
- PyObject *dmap, *fname, *v;
- PyObject *nonnset = NULL, *otherpset = NULL, *result = NULL;
- Py_ssize_t pos;
-
- if (!PyArg_ParseTuple(args, "O!:nonnormalentries",
- &PyDict_Type, &dmap))
- goto bail;
-
- nonnset = PySet_New(NULL);
- if (nonnset == NULL)
- goto bail;
-
- otherpset = PySet_New(NULL);
- if (otherpset == NULL)
- goto bail;
-
- pos = 0;
- while (PyDict_Next(dmap, &pos, &fname, &v)) {
- dirstateTupleObject *t;
- if (!dirstate_tuple_check(v)) {
- PyErr_SetString(PyExc_TypeError,
- "expected a dirstate tuple");
- goto bail;
- }
- t = (dirstateTupleObject *)v;
-
- if (t->state == 'n' && t->size == -2) {
- if (PySet_Add(otherpset, fname) == -1) {
- goto bail;
- }
- }
-
- if (t->state == 'n' && t->mtime != -1)
- continue;
- if (PySet_Add(nonnset, fname) == -1)
- goto bail;
- }
-
- result = Py_BuildValue("(OO)", nonnset, otherpset);
- if (result == NULL)
- goto bail;
- Py_DECREF(nonnset);
- Py_DECREF(otherpset);
- return result;
-bail:
- Py_XDECREF(nonnset);
- Py_XDECREF(otherpset);
- Py_XDECREF(result);
- return NULL;
-}
-
-/*
- * Efficiently pack a dirstate object into its on-disk format.
- */
-static PyObject *pack_dirstate(PyObject *self, PyObject *args)
-{
- PyObject *packobj = NULL;
- PyObject *map, *copymap, *pl, *mtime_unset = NULL;
- Py_ssize_t nbytes, pos, l;
- PyObject *k, *v = NULL, *pn;
- char *p, *s;
- int now;
-
- if (!PyArg_ParseTuple(args, "O!O!Oi:pack_dirstate",
- &PyDict_Type, &map, &PyDict_Type, ©map,
- &pl, &now))
- return NULL;
-
- if (!PySequence_Check(pl) || PySequence_Size(pl) != 2) {
- PyErr_SetString(PyExc_TypeError, "expected 2-element sequence");
- return NULL;
- }
-
- /* Figure out how much we need to allocate. */
- for (nbytes = 40, pos = 0; PyDict_Next(map, &pos, &k, &v);) {
- PyObject *c;
- if (!PyBytes_Check(k)) {
- PyErr_SetString(PyExc_TypeError, "expected string key");
- goto bail;
- }
- nbytes += PyBytes_GET_SIZE(k) + 17;
- c = PyDict_GetItem(copymap, k);
- if (c) {
- if (!PyBytes_Check(c)) {
- PyErr_SetString(PyExc_TypeError,
- "expected string key");
- goto bail;
- }
- nbytes += PyBytes_GET_SIZE(c) + 1;
- }
- }
-
- packobj = PyBytes_FromStringAndSize(NULL, nbytes);
- if (packobj == NULL)
- goto bail;
-
- p = PyBytes_AS_STRING(packobj);
-
- pn = PySequence_ITEM(pl, 0);
- if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
- PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
- goto bail;
- }
- memcpy(p, s, l);
- p += 20;
- pn = PySequence_ITEM(pl, 1);
- if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
- PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
- goto bail;
- }
- memcpy(p, s, l);
- p += 20;
-
- for (pos = 0; PyDict_Next(map, &pos, &k, &v); ) {
- dirstateTupleObject *tuple;
- char state;
- int mode, size, mtime;
- Py_ssize_t len, l;
- PyObject *o;
- char *t;
-
- if (!dirstate_tuple_check(v)) {
- PyErr_SetString(PyExc_TypeError,
- "expected a dirstate tuple");
- goto bail;
- }
- tuple = (dirstateTupleObject *)v;
-
- state = tuple->state;
- mode = tuple->mode;
- size = tuple->size;
- mtime = tuple->mtime;
- if (state == 'n' && mtime == now) {
- /* See pure/parsers.py:pack_dirstate for why we do
- * this. */
- mtime = -1;
- mtime_unset = (PyObject *)make_dirstate_tuple(
- state, mode, size, mtime);
- if (!mtime_unset)
- goto bail;
- if (PyDict_SetItem(map, k, mtime_unset) == -1)
- goto bail;
- Py_DECREF(mtime_unset);
- mtime_unset = NULL;
- }
- *p++ = state;
- putbe32((uint32_t)mode, p);
- putbe32((uint32_t)size, p + 4);
- putbe32((uint32_t)mtime, p + 8);
- t = p + 12;
- p += 16;
- len = PyBytes_GET_SIZE(k);
- memcpy(p, PyBytes_AS_STRING(k), len);
- p += len;
- o = PyDict_GetItem(copymap, k);
- if (o) {
- *p++ = '\0';
- l = PyBytes_GET_SIZE(o);
- memcpy(p, PyBytes_AS_STRING(o), l);
- p += l;
- len += l + 1;
- }
- putbe32((uint32_t)len, t);
- }
-
- pos = p - PyBytes_AS_STRING(packobj);
- if (pos != nbytes) {
- PyErr_Format(PyExc_SystemError, "bad dirstate size: %ld != %ld",
- (long)pos, (long)nbytes);
- goto bail;
- }
-
- return packobj;
-bail:
- Py_XDECREF(mtime_unset);
- Py_XDECREF(packobj);
- Py_XDECREF(v);
- return NULL;
-}
-
-/*
- * A base-16 trie for fast node->rev mapping.
- *
- * Positive value is index of the next node in the trie
- * Negative value is a leaf: -(rev + 1)
- * Zero is empty
- */
-typedef struct {
- int children[16];
-} nodetree;
-
-/*
- * This class has two behaviors.
- *
- * When used in a list-like way (with integer keys), we decode an
- * entry in a RevlogNG index file on demand. Our last entry is a
- * sentinel, always a nullid. We have limited support for
- * integer-keyed insert and delete, only at elements right before the
- * sentinel.
- *
- * With string keys, we lazily perform a reverse mapping from node to
- * rev, using a base-16 trie.
- */
-typedef struct {
- PyObject_HEAD
- /* Type-specific fields go here. */
- PyObject *data; /* raw bytes of index */
- Py_buffer buf; /* buffer of data */
- PyObject **cache; /* cached tuples */
- const char **offsets; /* populated on demand */
- Py_ssize_t raw_length; /* original number of elements */
- Py_ssize_t length; /* current number of elements */
- PyObject *added; /* populated on demand */
- PyObject *headrevs; /* cache, invalidated on changes */
- PyObject *filteredrevs;/* filtered revs set */
- nodetree *nt; /* base-16 trie */
- unsigned ntlength; /* # nodes in use */
- unsigned ntcapacity; /* # nodes allocated */
- int ntdepth; /* maximum depth of tree */
- int ntsplits; /* # splits performed */
- int ntrev; /* last rev scanned */
- int ntlookups; /* # lookups */
- int ntmisses; /* # lookups that miss the cache */
- int inlined;
-} indexObject;
-
-static Py_ssize_t index_length(const indexObject *self)
-{
- if (self->added == NULL)
- return self->length;
- return self->length + PyList_GET_SIZE(self->added);
-}
-
-static PyObject *nullentry;
-static const char nullid[20];
-
-static Py_ssize_t inline_scan(indexObject *self, const char **offsets);
-
-#if LONG_MAX == 0x7fffffffL
-static char *tuple_format = "Kiiiiiis#";
-#else
-static char *tuple_format = "kiiiiiis#";
-#endif
-
-/* A RevlogNG v1 index entry is 64 bytes long. */
-static const long v1_hdrsize = 64;
-
-/*
- * Return a pointer to the beginning of a RevlogNG record.
- */
-static const char *index_deref(indexObject *self, Py_ssize_t pos)
-{
- if (self->inlined && pos > 0) {
- if (self->offsets == NULL) {
- self->offsets = PyMem_Malloc(self->raw_length *
- sizeof(*self->offsets));
- if (self->offsets == NULL)
- return (const char *)PyErr_NoMemory();
- inline_scan(self, self->offsets);
- }
- return self->offsets[pos];
- }
-
- return (const char *)(self->buf.buf) + pos * v1_hdrsize;
-}
-
-static inline int index_get_parents(indexObject *self, Py_ssize_t rev,
- int *ps, int maxrev)
-{
- if (rev >= self->length - 1) {
- PyObject *tuple = PyList_GET_ITEM(self->added,
- rev - self->length + 1);
- ps[0] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 5));
- ps[1] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 6));
- } else {
- const char *data = index_deref(self, rev);
- ps[0] = getbe32(data + 24);
- ps[1] = getbe32(data + 28);
- }
- /* If index file is corrupted, ps[] may point to invalid revisions. So
- * there is a risk of buffer overflow to trust them unconditionally. */
- if (ps[0] > maxrev || ps[1] > maxrev) {
- PyErr_SetString(PyExc_ValueError, "parent out of range");
- return -1;
- }
- return 0;
-}
-
-
-/*
- * RevlogNG format (all in big endian, data may be inlined):
- * 6 bytes: offset
- * 2 bytes: flags
- * 4 bytes: compressed length
- * 4 bytes: uncompressed length
- * 4 bytes: base revision
- * 4 bytes: link revision
- * 4 bytes: parent 1 revision
- * 4 bytes: parent 2 revision
- * 32 bytes: nodeid (only 20 bytes used)
- */
-static PyObject *index_get(indexObject *self, Py_ssize_t pos)
-{
- uint64_t offset_flags;
- int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2;
- const char *c_node_id;
- const char *data;
- Py_ssize_t length = index_length(self);
- PyObject *entry;
-
- if (pos < 0)
- pos += length;
-
- if (pos < 0 || pos >= length) {
- PyErr_SetString(PyExc_IndexError, "revlog index out of range");
- return NULL;
- }
-
- if (pos == length - 1) {
- Py_INCREF(nullentry);
- return nullentry;
- }
-
- if (pos >= self->length - 1) {
- PyObject *obj;
- obj = PyList_GET_ITEM(self->added, pos - self->length + 1);
- Py_INCREF(obj);
- return obj;
- }
-
- if (self->cache) {
- if (self->cache[pos]) {
- Py_INCREF(self->cache[pos]);
- return self->cache[pos];
- }
- } else {
- self->cache = calloc(self->raw_length, sizeof(PyObject *));
- if (self->cache == NULL)
- return PyErr_NoMemory();
- }
-
- data = index_deref(self, pos);
- if (data == NULL)
- return NULL;
-
- offset_flags = getbe32(data + 4);
- if (pos == 0) /* mask out version number for the first entry */
- offset_flags &= 0xFFFF;
- else {
- uint32_t offset_high = getbe32(data);
- offset_flags |= ((uint64_t)offset_high) << 32;
- }
-
- comp_len = getbe32(data + 8);
- uncomp_len = getbe32(data + 12);
- base_rev = getbe32(data + 16);
- link_rev = getbe32(data + 20);
- parent_1 = getbe32(data + 24);
- parent_2 = getbe32(data + 28);
- c_node_id = data + 32;
-
- entry = Py_BuildValue(tuple_format, offset_flags, comp_len,
- uncomp_len, base_rev, link_rev,
- parent_1, parent_2, c_node_id, 20);
-
- if (entry) {
- PyObject_GC_UnTrack(entry);
- Py_INCREF(entry);
- }
-
- self->cache[pos] = entry;
-
- return entry;
-}
-
-/*
- * Return the 20-byte SHA of the node corresponding to the given rev.
- */
-static const char *index_node(indexObject *self, Py_ssize_t pos)
-{
- Py_ssize_t length = index_length(self);
- const char *data;
-
- if (pos == length - 1 || pos == INT_MAX)
- return nullid;
-
- if (pos >= length)
- return NULL;
-
- if (pos >= self->length - 1) {
- PyObject *tuple, *str;
- tuple = PyList_GET_ITEM(self->added, pos - self->length + 1);
- str = PyTuple_GetItem(tuple, 7);
- return str ? PyBytes_AS_STRING(str) : NULL;
- }
-
- data = index_deref(self, pos);
- return data ? data + 32 : NULL;
-}
-
-static int nt_insert(indexObject *self, const char *node, int rev);
-
-static int node_check(PyObject *obj, char **node, Py_ssize_t *nodelen)
-{
- if (PyBytes_AsStringAndSize(obj, node, nodelen) == -1)
- return -1;
- if (*nodelen == 20)
- return 0;
- PyErr_SetString(PyExc_ValueError, "20-byte hash required");
- return -1;
-}
-
-static PyObject *index_insert(indexObject *self, PyObject *args)
-{
- PyObject *obj;
- char *node;
- int index;
- Py_ssize_t len, nodelen;
-
- if (!PyArg_ParseTuple(args, "iO", &index, &obj))
- return NULL;
-
- if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 8) {
- PyErr_SetString(PyExc_TypeError, "8-tuple required");
- return NULL;
- }
-
- if (node_check(PyTuple_GET_ITEM(obj, 7), &node, &nodelen) == -1)
- return NULL;
-
- len = index_length(self);
-
- if (index < 0)
- index += len;
-
- if (index != len - 1) {
- PyErr_SetString(PyExc_IndexError,
- "insert only supported at index -1");
- return NULL;
- }
-
- if (self->added == NULL) {
- self->added = PyList_New(0);
- if (self->added == NULL)
- return NULL;
- }
-
- if (PyList_Append(self->added, obj) == -1)
- return NULL;
-
- if (self->nt)
- nt_insert(self, node, index);
-
- Py_CLEAR(self->headrevs);
- Py_RETURN_NONE;
-}
-
-static void _index_clearcaches(indexObject *self)
-{
- if (self->cache) {
- Py_ssize_t i;
-
- for (i = 0; i < self->raw_length; i++)
- Py_CLEAR(self->cache[i]);
- free(self->cache);
- self->cache = NULL;
- }
- if (self->offsets) {
- PyMem_Free(self->offsets);
- self->offsets = NULL;
- }
- if (self->nt) {
- free(self->nt);
- self->nt = NULL;
- }
- Py_CLEAR(self->headrevs);
-}
-
-static PyObject *index_clearcaches(indexObject *self)
-{
- _index_clearcaches(self);
- self->ntlength = self->ntcapacity = 0;
- self->ntdepth = self->ntsplits = 0;
- self->ntrev = -1;
- self->ntlookups = self->ntmisses = 0;
- Py_RETURN_NONE;
-}
-
-static PyObject *index_stats(indexObject *self)
-{
- PyObject *obj = PyDict_New();
- PyObject *t = NULL;
-
- if (obj == NULL)
- return NULL;
-
-#define istat(__n, __d) \
- do { \
- t = PyInt_FromSsize_t(self->__n); \
- if (!t) \
- goto bail; \
- if (PyDict_SetItemString(obj, __d, t) == -1) \
- goto bail; \
- Py_DECREF(t); \
- } while (0)
-
- if (self->added) {
- Py_ssize_t len = PyList_GET_SIZE(self->added);
- t = PyInt_FromSsize_t(len);
- if (!t)
- goto bail;
- if (PyDict_SetItemString(obj, "index entries added", t) == -1)
- goto bail;
- Py_DECREF(t);
- }
-
- if (self->raw_length != self->length - 1)
- istat(raw_length, "revs on disk");
- istat(length, "revs in memory");
- istat(ntcapacity, "node trie capacity");
- istat(ntdepth, "node trie depth");
- istat(ntlength, "node trie count");
- istat(ntlookups, "node trie lookups");
- istat(ntmisses, "node trie misses");
- istat(ntrev, "node trie last rev scanned");
- istat(ntsplits, "node trie splits");
-
-#undef istat
-
- return obj;
-
-bail:
- Py_XDECREF(obj);
- Py_XDECREF(t);
- return NULL;
-}
-
-/*
- * When we cache a list, we want to be sure the caller can't mutate
- * the cached copy.
- */
-static PyObject *list_copy(PyObject *list)
-{
- Py_ssize_t len = PyList_GET_SIZE(list);
- PyObject *newlist = PyList_New(len);
- Py_ssize_t i;
-
- if (newlist == NULL)
- return NULL;
-
- for (i = 0; i < len; i++) {
- PyObject *obj = PyList_GET_ITEM(list, i);
- Py_INCREF(obj);
- PyList_SET_ITEM(newlist, i, obj);
- }
-
- return newlist;
-}
-
-static int check_filter(PyObject *filter, Py_ssize_t arg) {
- if (filter) {
- PyObject *arglist, *result;
- int isfiltered;
-
- arglist = Py_BuildValue("(n)", arg);
- if (!arglist) {
- return -1;
- }
-
- result = PyEval_CallObject(filter, arglist);
- Py_DECREF(arglist);
- if (!result) {
- return -1;
- }
-
- /* PyObject_IsTrue returns 1 if true, 0 if false, -1 if error,
- * same as this function, so we can just return it directly.*/
- isfiltered = PyObject_IsTrue(result);
- Py_DECREF(result);
- return isfiltered;
- } else {
- return 0;
- }
-}
-
-static Py_ssize_t add_roots_get_min(indexObject *self, PyObject *list,
- Py_ssize_t marker, char *phases)
-{
- PyObject *iter = NULL;
- PyObject *iter_item = NULL;
- Py_ssize_t min_idx = index_length(self) + 1;
- long iter_item_long;
-
- if (PyList_GET_SIZE(list) != 0) {
- iter = PyObject_GetIter(list);
- if (iter == NULL)
- return -2;
- while ((iter_item = PyIter_Next(iter)))
- {
- iter_item_long = PyInt_AS_LONG(iter_item);
- Py_DECREF(iter_item);
- if (iter_item_long < min_idx)
- min_idx = iter_item_long;
- phases[iter_item_long] = marker;
- }
- Py_DECREF(iter);
- }
-
- return min_idx;
-}
-
-static inline void set_phase_from_parents(char *phases, int parent_1,
- int parent_2, Py_ssize_t i)
-{
- if (parent_1 >= 0 && phases[parent_1] > phases[i])
- phases[i] = phases[parent_1];
- if (parent_2 >= 0 && phases[parent_2] > phases[i])
- phases[i] = phases[parent_2];
-}
-
-static PyObject *reachableroots2(indexObject *self, PyObject *args)
-{
-
- /* Input */
- long minroot;
- PyObject *includepatharg = NULL;
- int includepath = 0;
- /* heads and roots are lists */
- PyObject *heads = NULL;
- PyObject *roots = NULL;
- PyObject *reachable = NULL;
-
- PyObject *val;
- Py_ssize_t len = index_length(self) - 1;
- long revnum;
- Py_ssize_t k;
- Py_ssize_t i;
- Py_ssize_t l;
- int r;
- int parents[2];
-
- /* Internal data structure:
- * tovisit: array of length len+1 (all revs + nullrev), filled upto lentovisit
- * revstates: array of length len+1 (all revs + nullrev) */
- int *tovisit = NULL;
- long lentovisit = 0;
- enum { RS_SEEN = 1, RS_ROOT = 2, RS_REACHABLE = 4 };
- char *revstates = NULL;
-
- /* Get arguments */
- if (!PyArg_ParseTuple(args, "lO!O!O!", &minroot, &PyList_Type, &heads,
- &PyList_Type, &roots,
- &PyBool_Type, &includepatharg))
- goto bail;
-
- if (includepatharg == Py_True)
- includepath = 1;
-
- /* Initialize return set */
- reachable = PyList_New(0);
- if (reachable == NULL)
- goto bail;
-
- /* Initialize internal datastructures */
- tovisit = (int *)malloc((len + 1) * sizeof(int));
- if (tovisit == NULL) {
- PyErr_NoMemory();
- goto bail;
- }
-
- revstates = (char *)calloc(len + 1, 1);
- if (revstates == NULL) {
- PyErr_NoMemory();
- goto bail;
- }
-
- l = PyList_GET_SIZE(roots);
- for (i = 0; i < l; i++) {
- revnum = PyInt_AsLong(PyList_GET_ITEM(roots, i));
- if (revnum == -1 && PyErr_Occurred())
- goto bail;
- /* If root is out of range, e.g. wdir(), it must be unreachable
- * from heads. So we can just ignore it. */
- if (revnum + 1 < 0 || revnum + 1 >= len + 1)
- continue;
- revstates[revnum + 1] |= RS_ROOT;
- }
-
- /* Populate tovisit with all the heads */
- l = PyList_GET_SIZE(heads);
- for (i = 0; i < l; i++) {
- revnum = PyInt_AsLong(PyList_GET_ITEM(heads, i));
- if (revnum == -1 && PyErr_Occurred())
- goto bail;
- if (revnum + 1 < 0 || revnum + 1 >= len + 1) {
- PyErr_SetString(PyExc_IndexError, "head out of range");
- goto bail;
- }
- if (!(revstates[revnum + 1] & RS_SEEN)) {
- tovisit[lentovisit++] = (int)revnum;
- revstates[revnum + 1] |= RS_SEEN;
- }
- }
-
- /* Visit the tovisit list and find the reachable roots */
- k = 0;
- while (k < lentovisit) {
- /* Add the node to reachable if it is a root*/
- revnum = tovisit[k++];
- if (revstates[revnum + 1] & RS_ROOT) {
- revstates[revnum + 1] |= RS_REACHABLE;
- val = PyInt_FromLong(revnum);
- if (val == NULL)
- goto bail;
- r = PyList_Append(reachable, val);
- Py_DECREF(val);
- if (r < 0)
- goto bail;
- if (includepath == 0)
- continue;
- }
-
- /* Add its parents to the list of nodes to visit */
- if (revnum == -1)
- continue;
- r = index_get_parents(self, revnum, parents, (int)len - 1);
- if (r < 0)
- goto bail;
- for (i = 0; i < 2; i++) {
- if (!(revstates[parents[i] + 1] & RS_SEEN)
- && parents[i] >= minroot) {
- tovisit[lentovisit++] = parents[i];
- revstates[parents[i] + 1] |= RS_SEEN;
- }
- }
- }
-
- /* Find all the nodes in between the roots we found and the heads
- * and add them to the reachable set */
- if (includepath == 1) {
- long minidx = minroot;
- if (minidx < 0)
- minidx = 0;
- for (i = minidx; i < len; i++) {
- if (!(revstates[i + 1] & RS_SEEN))
- continue;
- r = index_get_parents(self, i, parents, (int)len - 1);
- /* Corrupted index file, error is set from
- * index_get_parents */
- if (r < 0)
- goto bail;
- if (((revstates[parents[0] + 1] |
- revstates[parents[1] + 1]) & RS_REACHABLE)
- && !(revstates[i + 1] & RS_REACHABLE)) {
- revstates[i + 1] |= RS_REACHABLE;
- val = PyInt_FromLong(i);
- if (val == NULL)
- goto bail;
- r = PyList_Append(reachable, val);
- Py_DECREF(val);
- if (r < 0)
- goto bail;
- }
- }
- }
-
- free(revstates);
- free(tovisit);
- return reachable;
-bail:
- Py_XDECREF(reachable);
- free(revstates);
- free(tovisit);
- return NULL;
-}
-
-static PyObject *compute_phases_map_sets(indexObject *self, PyObject *args)
-{
- PyObject *roots = Py_None;
- PyObject *ret = NULL;
- PyObject *phaseslist = NULL;
- PyObject *phaseroots = NULL;
- PyObject *phaseset = NULL;
- PyObject *phasessetlist = NULL;
- PyObject *rev = NULL;
- Py_ssize_t len = index_length(self) - 1;
- Py_ssize_t numphase = 0;
- Py_ssize_t minrevallphases = 0;
- Py_ssize_t minrevphase = 0;
- Py_ssize_t i = 0;
- char *phases = NULL;
- long phase;
-
- if (!PyArg_ParseTuple(args, "O", &roots))
- goto done;
- if (roots == NULL || !PyList_Check(roots))
- goto done;
-
- phases = calloc(len, 1); /* phase per rev: {0: public, 1: draft, 2: secret} */
- if (phases == NULL) {
- PyErr_NoMemory();
- goto done;
- }
- /* Put the phase information of all the roots in phases */
- numphase = PyList_GET_SIZE(roots)+1;
- minrevallphases = len + 1;
- phasessetlist = PyList_New(numphase);
- if (phasessetlist == NULL)
- goto done;
-
- PyList_SET_ITEM(phasessetlist, 0, Py_None);
- Py_INCREF(Py_None);
-
- for (i = 0; i < numphase-1; i++) {
- phaseroots = PyList_GET_ITEM(roots, i);
- phaseset = PySet_New(NULL);
- if (phaseset == NULL)
- goto release;
- PyList_SET_ITEM(phasessetlist, i+1, phaseset);
- if (!PyList_Check(phaseroots))
- goto release;
- minrevphase = add_roots_get_min(self, phaseroots, i+1, phases);
- if (minrevphase == -2) /* Error from add_roots_get_min */
- goto release;
- minrevallphases = MIN(minrevallphases, minrevphase);
- }
- /* Propagate the phase information from the roots to the revs */
- if (minrevallphases != -1) {
- int parents[2];
- for (i = minrevallphases; i < len; i++) {
- if (index_get_parents(self, i, parents,
- (int)len - 1) < 0)
- goto release;
- set_phase_from_parents(phases, parents[0], parents[1], i);
- }
- }
- /* Transform phase list to a python list */
- phaseslist = PyList_New(len);
- if (phaseslist == NULL)
- goto release;
- for (i = 0; i < len; i++) {
- PyObject *phaseval;
-
- phase = phases[i];
- /* We only store the sets of phase for non public phase, the public phase
- * is computed as a difference */
- if (phase != 0) {
- phaseset = PyList_GET_ITEM(phasessetlist, phase);
- rev = PyInt_FromLong(i);
- if (rev == NULL)
- goto release;
- PySet_Add(phaseset, rev);
- Py_XDECREF(rev);
- }
- phaseval = PyInt_FromLong(phase);
- if (phaseval == NULL)
- goto release;
- PyList_SET_ITEM(phaseslist, i, phaseval);
- }
- ret = PyTuple_Pack(2, phaseslist, phasessetlist);
-
-release:
- Py_XDECREF(phaseslist);
- Py_XDECREF(phasessetlist);
-done:
- free(phases);
- return ret;
-}
-
-static PyObject *index_headrevs(indexObject *self, PyObject *args)
-{
- Py_ssize_t i, j, len;
- char *nothead = NULL;
- PyObject *heads = NULL;
- PyObject *filter = NULL;
- PyObject *filteredrevs = Py_None;
-
- if (!PyArg_ParseTuple(args, "|O", &filteredrevs)) {
- return NULL;
- }
-
- if (self->headrevs && filteredrevs == self->filteredrevs)
- return list_copy(self->headrevs);
-
- Py_DECREF(self->filteredrevs);
- self->filteredrevs = filteredrevs;
- Py_INCREF(filteredrevs);
-
- if (filteredrevs != Py_None) {
- filter = PyObject_GetAttrString(filteredrevs, "__contains__");
- if (!filter) {
- PyErr_SetString(PyExc_TypeError,
- "filteredrevs has no attribute __contains__");
- goto bail;
- }
- }
-
- len = index_length(self) - 1;
- heads = PyList_New(0);
- if (heads == NULL)
- goto bail;
- if (len == 0) {
- PyObject *nullid = PyInt_FromLong(-1);
- if (nullid == NULL || PyList_Append(heads, nullid) == -1) {
- Py_XDECREF(nullid);
- goto bail;
- }
- goto done;
- }
-
- nothead = calloc(len, 1);
- if (nothead == NULL) {
- PyErr_NoMemory();
- goto bail;
- }
-
- for (i = len - 1; i >= 0; i--) {
- int isfiltered;
- int parents[2];
-
- /* If nothead[i] == 1, it means we've seen an unfiltered child of this
- * node already, and therefore this node is not filtered. So we can skip
- * the expensive check_filter step.
- */
- if (nothead[i] != 1) {
- isfiltered = check_filter(filter, i);
- if (isfiltered == -1) {
- PyErr_SetString(PyExc_TypeError,
- "unable to check filter");
- goto bail;
- }
-
- if (isfiltered) {
- nothead[i] = 1;
- continue;
- }
- }
-
- if (index_get_parents(self, i, parents, (int)len - 1) < 0)
- goto bail;
- for (j = 0; j < 2; j++) {
- if (parents[j] >= 0)
- nothead[parents[j]] = 1;
- }
- }
-
- for (i = 0; i < len; i++) {
- PyObject *head;
-
- if (nothead[i])
- continue;
- head = PyInt_FromSsize_t(i);
- if (head == NULL || PyList_Append(heads, head) == -1) {
- Py_XDECREF(head);
- goto bail;
- }
- }
-
-done:
- self->headrevs = heads;
- Py_XDECREF(filter);
- free(nothead);
- return list_copy(self->headrevs);
-bail:
- Py_XDECREF(filter);
- Py_XDECREF(heads);
- free(nothead);
- return NULL;
-}
-
-static inline int nt_level(const char *node, Py_ssize_t level)
-{
- int v = node[level>>1];
- if (!(level & 1))
- v >>= 4;
- return v & 0xf;
-}
-
-/*
- * Return values:
- *
- * -4: match is ambiguous (multiple candidates)
- * -2: not found
- * rest: valid rev
- */
-static int nt_find(indexObject *self, const char *node, Py_ssize_t nodelen,
- int hex)
-{
- int (*getnybble)(const char *, Py_ssize_t) = hex ? hexdigit : nt_level;
- int level, maxlevel, off;
-
- if (nodelen == 20 && node[0] == '\0' && memcmp(node, nullid, 20) == 0)
- return -1;
-
- if (self->nt == NULL)
- return -2;
-
- if (hex)
- maxlevel = nodelen > 40 ? 40 : (int)nodelen;
- else
- maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2);
-
- for (level = off = 0; level < maxlevel; level++) {
- int k = getnybble(node, level);
- nodetree *n = &self->nt[off];
- int v = n->children[k];
-
- if (v < 0) {
- const char *n;
- Py_ssize_t i;
-
- v = -(v + 1);
- n = index_node(self, v);
- if (n == NULL)
- return -2;
- for (i = level; i < maxlevel; i++)
- if (getnybble(node, i) != nt_level(n, i))
- return -2;
- return v;
- }
- if (v == 0)
- return -2;
- off = v;
- }
- /* multiple matches against an ambiguous prefix */
- return -4;
-}
-
-static int nt_new(indexObject *self)
-{
- if (self->ntlength == self->ntcapacity) {
- if (self->ntcapacity >= INT_MAX / (sizeof(nodetree) * 2)) {
- PyErr_SetString(PyExc_MemoryError,
- "overflow in nt_new");
- return -1;
- }
- self->ntcapacity *= 2;
- self->nt = realloc(self->nt,
- self->ntcapacity * sizeof(nodetree));
- if (self->nt == NULL) {
- PyErr_SetString(PyExc_MemoryError, "out of memory");
- return -1;
- }
- memset(&self->nt[self->ntlength], 0,
- sizeof(nodetree) * (self->ntcapacity - self->ntlength));
- }
- return self->ntlength++;
-}
-
-static int nt_insert(indexObject *self, const char *node, int rev)
-{
- int level = 0;
- int off = 0;
-
- while (level < 40) {
- int k = nt_level(node, level);
- nodetree *n;
- int v;
-
- n = &self->nt[off];
- v = n->children[k];
-
- if (v == 0) {
- n->children[k] = -rev - 1;
- return 0;
- }
- if (v < 0) {
- const char *oldnode = index_node(self, -(v + 1));
- int noff;
-
- if (!oldnode || !memcmp(oldnode, node, 20)) {
- n->children[k] = -rev - 1;
- return 0;
- }
- noff = nt_new(self);
- if (noff == -1)
- return -1;
- /* self->nt may have been changed by realloc */
- self->nt[off].children[k] = noff;
- off = noff;
- n = &self->nt[off];
- n->children[nt_level(oldnode, ++level)] = v;
- if (level > self->ntdepth)
- self->ntdepth = level;
- self->ntsplits += 1;
- } else {
- level += 1;
- off = v;
- }
- }
-
- return -1;
-}
-
-static int nt_init(indexObject *self)
-{
- if (self->nt == NULL) {
- if ((size_t)self->raw_length > INT_MAX / sizeof(nodetree)) {
- PyErr_SetString(PyExc_ValueError, "overflow in nt_init");
- return -1;
- }
- self->ntcapacity = self->raw_length < 4
- ? 4 : (int)self->raw_length / 2;
-
- self->nt = calloc(self->ntcapacity, sizeof(nodetree));
- if (self->nt == NULL) {
- PyErr_NoMemory();
- return -1;
- }
- self->ntlength = 1;
- self->ntrev = (int)index_length(self) - 1;
- self->ntlookups = 1;
- self->ntmisses = 0;
- if (nt_insert(self, nullid, INT_MAX) == -1)
- return -1;
- }
- return 0;
-}
-
-/*
- * Return values:
- *
- * -3: error (exception set)
- * -2: not found (no exception set)
- * rest: valid rev
- */
-static int index_find_node(indexObject *self,
- const char *node, Py_ssize_t nodelen)
-{
- int rev;
-
- self->ntlookups++;
- rev = nt_find(self, node, nodelen, 0);
- if (rev >= -1)
- return rev;
-
- if (nt_init(self) == -1)
- return -3;
-
- /*
- * For the first handful of lookups, we scan the entire index,
- * and cache only the matching nodes. This optimizes for cases
- * like "hg tip", where only a few nodes are accessed.
- *
- * After that, we cache every node we visit, using a single
- * scan amortized over multiple lookups. This gives the best
- * bulk performance, e.g. for "hg log".
- */
- if (self->ntmisses++ < 4) {
- for (rev = self->ntrev - 1; rev >= 0; rev--) {
- const char *n = index_node(self, rev);
- if (n == NULL)
- return -2;
- if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
- if (nt_insert(self, n, rev) == -1)
- return -3;
- break;
- }
- }
- } else {
- for (rev = self->ntrev - 1; rev >= 0; rev--) {
- const char *n = index_node(self, rev);
- if (n == NULL) {
- self->ntrev = rev + 1;
- return -2;
- }
- if (nt_insert(self, n, rev) == -1) {
- self->ntrev = rev + 1;
- return -3;
- }
- if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
- break;
- }
- }
- self->ntrev = rev;
- }
-
- if (rev >= 0)
- return rev;
- return -2;
-}
-
-static void raise_revlog_error(void)
-{
- PyObject *mod = NULL, *dict = NULL, *errclass = NULL;
-
- mod = PyImport_ImportModule("mercurial.error");
- if (mod == NULL) {
- goto cleanup;
- }
-
- dict = PyModule_GetDict(mod);
- if (dict == NULL) {
- goto cleanup;
- }
- Py_INCREF(dict);
-
- errclass = PyDict_GetItemString(dict, "RevlogError");
- if (errclass == NULL) {
- PyErr_SetString(PyExc_SystemError,
- "could not find RevlogError");
- goto cleanup;
- }
-
- /* value of exception is ignored by callers */
- PyErr_SetString(errclass, "RevlogError");
-
-cleanup:
- Py_XDECREF(dict);
- Py_XDECREF(mod);
-}
-
-static PyObject *index_getitem(indexObject *self, PyObject *value)
-{
- char *node;
- Py_ssize_t nodelen;
- int rev;
-
- if (PyInt_Check(value))
- return index_get(self, PyInt_AS_LONG(value));
-
- if (node_check(value, &node, &nodelen) == -1)
- return NULL;
- rev = index_find_node(self, node, nodelen);
- if (rev >= -1)
- return PyInt_FromLong(rev);
- if (rev == -2)
- raise_revlog_error();
- return NULL;
-}
-
-static int nt_partialmatch(indexObject *self, const char *node,
- Py_ssize_t nodelen)
-{
- int rev;
-
- if (nt_init(self) == -1)
- return -3;
-
- if (self->ntrev > 0) {
- /* ensure that the radix tree is fully populated */
- for (rev = self->ntrev - 1; rev >= 0; rev--) {
- const char *n = index_node(self, rev);
- if (n == NULL)
- return -2;
- if (nt_insert(self, n, rev) == -1)
- return -3;
- }
- self->ntrev = rev;
- }
-
- return nt_find(self, node, nodelen, 1);
-}
-
-static PyObject *index_partialmatch(indexObject *self, PyObject *args)
-{
- const char *fullnode;
- int nodelen;
- char *node;
- int rev, i;
-
- if (!PyArg_ParseTuple(args, "s#", &node, &nodelen))
- return NULL;
-
- if (nodelen < 4) {
- PyErr_SetString(PyExc_ValueError, "key too short");
- return NULL;
- }
-
- if (nodelen > 40) {
- PyErr_SetString(PyExc_ValueError, "key too long");
- return NULL;
- }
-
- for (i = 0; i < nodelen; i++)
- hexdigit(node, i);
- if (PyErr_Occurred()) {
- /* input contains non-hex characters */
- PyErr_Clear();
- Py_RETURN_NONE;
- }
-
- rev = nt_partialmatch(self, node, nodelen);
-
- switch (rev) {
- case -4:
- raise_revlog_error();
- case -3:
- return NULL;
- case -2:
- Py_RETURN_NONE;
- case -1:
- return PyBytes_FromStringAndSize(nullid, 20);
- }
-
- fullnode = index_node(self, rev);
- if (fullnode == NULL) {
- PyErr_Format(PyExc_IndexError,
- "could not access rev %d", rev);
- return NULL;
- }
- return PyBytes_FromStringAndSize(fullnode, 20);
-}
-
-static PyObject *index_m_get(indexObject *self, PyObject *args)
-{
- Py_ssize_t nodelen;
- PyObject *val;
- char *node;
- int rev;
-
- if (!PyArg_ParseTuple(args, "O", &val))
- return NULL;
- if (node_check(val, &node, &nodelen) == -1)
- return NULL;
- rev = index_find_node(self, node, nodelen);
- if (rev == -3)
- return NULL;
- if (rev == -2)
- Py_RETURN_NONE;
- return PyInt_FromLong(rev);
-}
-
-static int index_contains(indexObject *self, PyObject *value)
-{
- char *node;
- Py_ssize_t nodelen;
-
- if (PyInt_Check(value)) {
- long rev = PyInt_AS_LONG(value);
- return rev >= -1 && rev < index_length(self);
- }
-
- if (node_check(value, &node, &nodelen) == -1)
- return -1;
-
- switch (index_find_node(self, node, nodelen)) {
- case -3:
- return -1;
- case -2:
- return 0;
- default:
- return 1;
- }
-}
-
-typedef uint64_t bitmask;
-
-/*
- * Given a disjoint set of revs, return all candidates for the
- * greatest common ancestor. In revset notation, this is the set
- * "heads(::a and ::b and ...)"
- */
-static PyObject *find_gca_candidates(indexObject *self, const int *revs,
- int revcount)
-{
- const bitmask allseen = (1ull << revcount) - 1;
- const bitmask poison = 1ull << revcount;
- PyObject *gca = PyList_New(0);
- int i, v, interesting;
- int maxrev = -1;
- bitmask sp;
- bitmask *seen;
-
- if (gca == NULL)
- return PyErr_NoMemory();
-
- for (i = 0; i < revcount; i++) {
- if (revs[i] > maxrev)
- maxrev = revs[i];
- }
-
- seen = calloc(sizeof(*seen), maxrev + 1);
- if (seen == NULL) {
- Py_DECREF(gca);
- return PyErr_NoMemory();
- }
-
- for (i = 0; i < revcount; i++)
- seen[revs[i]] = 1ull << i;
-
- interesting = revcount;
-
- for (v = maxrev; v >= 0 && interesting; v--) {
- bitmask sv = seen[v];
- int parents[2];
-
- if (!sv)
- continue;
-
- if (sv < poison) {
- interesting -= 1;
- if (sv == allseen) {
- PyObject *obj = PyInt_FromLong(v);
- if (obj == NULL)
- goto bail;
- if (PyList_Append(gca, obj) == -1) {
- Py_DECREF(obj);
- goto bail;
- }
- sv |= poison;
- for (i = 0; i < revcount; i++) {
- if (revs[i] == v)
- goto done;
- }
- }
- }
- if (index_get_parents(self, v, parents, maxrev) < 0)
- goto bail;
-
- for (i = 0; i < 2; i++) {
- int p = parents[i];
- if (p == -1)
- continue;
- sp = seen[p];
- if (sv < poison) {
- if (sp == 0) {
- seen[p] = sv;
- interesting++;
- }
- else if (sp != sv)
- seen[p] |= sv;
- } else {
- if (sp && sp < poison)
- interesting--;
- seen[p] = sv;
- }
- }
- }
-
-done:
- free(seen);
- return gca;
-bail:
- free(seen);
- Py_XDECREF(gca);
- return NULL;
-}
-
-/*
- * Given a disjoint set of revs, return the subset with the longest
- * path to the root.
- */
-static PyObject *find_deepest(indexObject *self, PyObject *revs)
-{
- const Py_ssize_t revcount = PyList_GET_SIZE(revs);
- static const Py_ssize_t capacity = 24;
- int *depth, *interesting = NULL;
- int i, j, v, ninteresting;
- PyObject *dict = NULL, *keys = NULL;
- long *seen = NULL;
- int maxrev = -1;
- long final;
-
- if (revcount > capacity) {
- PyErr_Format(PyExc_OverflowError,
- "bitset size (%ld) > capacity (%ld)",
- (long)revcount, (long)capacity);
- return NULL;
- }
-
- for (i = 0; i < revcount; i++) {
- int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
- if (n > maxrev)
- maxrev = n;
- }
-
- depth = calloc(sizeof(*depth), maxrev + 1);
- if (depth == NULL)
- return PyErr_NoMemory();
-
- seen = calloc(sizeof(*seen), maxrev + 1);
- if (seen == NULL) {
- PyErr_NoMemory();
- goto bail;
- }
-
- interesting = calloc(sizeof(*interesting), 2 << revcount);
- if (interesting == NULL) {
- PyErr_NoMemory();
- goto bail;
- }
-
- if (PyList_Sort(revs) == -1)
- goto bail;
-
- for (i = 0; i < revcount; i++) {
- int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
- long b = 1l << i;
- depth[n] = 1;
- seen[n] = b;
- interesting[b] = 1;
- }
-
- ninteresting = (int)revcount;
-
- for (v = maxrev; v >= 0 && ninteresting > 1; v--) {
- int dv = depth[v];
- int parents[2];
- long sv;
-
- if (dv == 0)
- continue;
-
- sv = seen[v];
- if (index_get_parents(self, v, parents, maxrev) < 0)
- goto bail;
-
- for (i = 0; i < 2; i++) {
- int p = parents[i];
- long sp;
- int dp;
-
- if (p == -1)
- continue;
-
- dp = depth[p];
- sp = seen[p];
- if (dp <= dv) {
- depth[p] = dv + 1;
- if (sp != sv) {
- interesting[sv] += 1;
- seen[p] = sv;
- if (sp) {
- interesting[sp] -= 1;
- if (interesting[sp] == 0)
- ninteresting -= 1;
- }
- }
- }
- else if (dv == dp - 1) {
- long nsp = sp | sv;
- if (nsp == sp)
- continue;
- seen[p] = nsp;
- interesting[sp] -= 1;
- if (interesting[sp] == 0 && interesting[nsp] > 0)
- ninteresting -= 1;
- interesting[nsp] += 1;
- }
- }
- interesting[sv] -= 1;
- if (interesting[sv] == 0)
- ninteresting -= 1;
- }
-
- final = 0;
- j = ninteresting;
- for (i = 0; i < (int)(2 << revcount) && j > 0; i++) {
- if (interesting[i] == 0)
- continue;
- final |= i;
- j -= 1;
- }
- if (final == 0) {
- keys = PyList_New(0);
- goto bail;
- }
-
- dict = PyDict_New();
- if (dict == NULL)
- goto bail;
-
- for (i = 0; i < revcount; i++) {
- PyObject *key;
-
- if ((final & (1 << i)) == 0)
- continue;
-
- key = PyList_GET_ITEM(revs, i);
- Py_INCREF(key);
- Py_INCREF(Py_None);
- if (PyDict_SetItem(dict, key, Py_None) == -1) {
- Py_DECREF(key);
- Py_DECREF(Py_None);
- goto bail;
- }
- }
-
- keys = PyDict_Keys(dict);
-
-bail:
- free(depth);
- free(seen);
- free(interesting);
- Py_XDECREF(dict);
-
- return keys;
-}
-
-/*
- * Given a (possibly overlapping) set of revs, return all the
- * common ancestors heads: heads(::args[0] and ::a[1] and ...)
- */
-static PyObject *index_commonancestorsheads(indexObject *self, PyObject *args)
-{
- PyObject *ret = NULL;
- Py_ssize_t argcount, i, len;
- bitmask repeat = 0;
- int revcount = 0;
- int *revs;
-
- argcount = PySequence_Length(args);
- revs = PyMem_Malloc(argcount * sizeof(*revs));
- if (argcount > 0 && revs == NULL)
- return PyErr_NoMemory();
- len = index_length(self) - 1;
-
- for (i = 0; i < argcount; i++) {
- static const int capacity = 24;
- PyObject *obj = PySequence_GetItem(args, i);
- bitmask x;
- long val;
-
- if (!PyInt_Check(obj)) {
- PyErr_SetString(PyExc_TypeError,
- "arguments must all be ints");
- Py_DECREF(obj);
- goto bail;
- }
- val = PyInt_AsLong(obj);
- Py_DECREF(obj);
- if (val == -1) {
- ret = PyList_New(0);
- goto done;
- }
- if (val < 0 || val >= len) {
- PyErr_SetString(PyExc_IndexError,
- "index out of range");
- goto bail;
- }
- /* this cheesy bloom filter lets us avoid some more
- * expensive duplicate checks in the common set-is-disjoint
- * case */
- x = 1ull << (val & 0x3f);
- if (repeat & x) {
- int k;
- for (k = 0; k < revcount; k++) {
- if (val == revs[k])
- goto duplicate;
- }
- }
- else repeat |= x;
- if (revcount >= capacity) {
- PyErr_Format(PyExc_OverflowError,
- "bitset size (%d) > capacity (%d)",
- revcount, capacity);
- goto bail;
- }
- revs[revcount++] = (int)val;
- duplicate:;
- }
-
- if (revcount == 0) {
- ret = PyList_New(0);
- goto done;
- }
- if (revcount == 1) {
- PyObject *obj;
- ret = PyList_New(1);
- if (ret == NULL)
- goto bail;
- obj = PyInt_FromLong(revs[0]);
- if (obj == NULL)
- goto bail;
- PyList_SET_ITEM(ret, 0, obj);
- goto done;
- }
-
- ret = find_gca_candidates(self, revs, revcount);
- if (ret == NULL)
- goto bail;
-
-done:
- PyMem_Free(revs);
- return ret;
-
-bail:
- PyMem_Free(revs);
- Py_XDECREF(ret);
- return NULL;
-}
-
-/*
- * Given a (possibly overlapping) set of revs, return the greatest
- * common ancestors: those with the longest path to the root.
- */
-static PyObject *index_ancestors(indexObject *self, PyObject *args)
-{
- PyObject *ret;
- PyObject *gca = index_commonancestorsheads(self, args);
- if (gca == NULL)
- return NULL;
-
- if (PyList_GET_SIZE(gca) <= 1) {
- return gca;
- }
-
- ret = find_deepest(self, gca);
- Py_DECREF(gca);
- return ret;
-}
-
-/*
- * Invalidate any trie entries introduced by added revs.
- */
-static void nt_invalidate_added(indexObject *self, Py_ssize_t start)
-{
- Py_ssize_t i, len = PyList_GET_SIZE(self->added);
-
- for (i = start; i < len; i++) {
- PyObject *tuple = PyList_GET_ITEM(self->added, i);
- PyObject *node = PyTuple_GET_ITEM(tuple, 7);
-
- nt_insert(self, PyBytes_AS_STRING(node), -1);
- }
-
- if (start == 0)
- Py_CLEAR(self->added);
-}
-
-/*
- * Delete a numeric range of revs, which must be at the end of the
- * range, but exclude the sentinel nullid entry.
- */
-static int index_slice_del(indexObject *self, PyObject *item)
-{
- Py_ssize_t start, stop, step, slicelength;
- Py_ssize_t length = index_length(self);
- int ret = 0;
-
-/* Argument changed from PySliceObject* to PyObject* in Python 3. */
-#ifdef IS_PY3K
- if (PySlice_GetIndicesEx(item, length,
-#else
- if (PySlice_GetIndicesEx((PySliceObject*)item, length,
-#endif
- &start, &stop, &step, &slicelength) < 0)
- return -1;
-
- if (slicelength <= 0)
- return 0;
-
- if ((step < 0 && start < stop) || (step > 0 && start > stop))
- stop = start;
-
- if (step < 0) {
- stop = start + 1;
- start = stop + step*(slicelength - 1) - 1;
- step = -step;
- }
-
- if (step != 1) {
- PyErr_SetString(PyExc_ValueError,
- "revlog index delete requires step size of 1");
- return -1;
- }
-
- if (stop != length - 1) {
- PyErr_SetString(PyExc_IndexError,
- "revlog index deletion indices are invalid");
- return -1;
- }
-
- if (start < self->length - 1) {
- if (self->nt) {
- Py_ssize_t i;
-
- for (i = start + 1; i < self->length - 1; i++) {
- const char *node = index_node(self, i);
-
- if (node)
- nt_insert(self, node, -1);
- }
- if (self->added)
- nt_invalidate_added(self, 0);
- if (self->ntrev > start)
- self->ntrev = (int)start;
- }
- self->length = start + 1;
- if (start < self->raw_length) {
- if (self->cache) {
- Py_ssize_t i;
- for (i = start; i < self->raw_length; i++)
- Py_CLEAR(self->cache[i]);
- }
- self->raw_length = start;
- }
- goto done;
- }
-
- if (self->nt) {
- nt_invalidate_added(self, start - self->length + 1);
- if (self->ntrev > start)
- self->ntrev = (int)start;
- }
- if (self->added)
- ret = PyList_SetSlice(self->added, start - self->length + 1,
- PyList_GET_SIZE(self->added), NULL);
-done:
- Py_CLEAR(self->headrevs);
- return ret;
-}
-
-/*
- * Supported ops:
- *
- * slice deletion
- * string assignment (extend node->rev mapping)
- * string deletion (shrink node->rev mapping)
- */
-static int index_assign_subscript(indexObject *self, PyObject *item,
- PyObject *value)
-{
- char *node;
- Py_ssize_t nodelen;
- long rev;
-
- if (PySlice_Check(item) && value == NULL)
- return index_slice_del(self, item);
-
- if (node_check(item, &node, &nodelen) == -1)
- return -1;
-
- if (value == NULL)
- return self->nt ? nt_insert(self, node, -1) : 0;
- rev = PyInt_AsLong(value);
- if (rev > INT_MAX || rev < 0) {
- if (!PyErr_Occurred())
- PyErr_SetString(PyExc_ValueError, "rev out of range");
- return -1;
- }
-
- if (nt_init(self) == -1)
- return -1;
- return nt_insert(self, node, (int)rev);
-}
-
-/*
- * Find all RevlogNG entries in an index that has inline data. Update
- * the optional "offsets" table with those entries.
- */
-static Py_ssize_t inline_scan(indexObject *self, const char **offsets)
-{
- const char *data = (const char *)self->buf.buf;
- Py_ssize_t pos = 0;
- Py_ssize_t end = self->buf.len;
- long incr = v1_hdrsize;
- Py_ssize_t len = 0;
-
- while (pos + v1_hdrsize <= end && pos >= 0) {
- uint32_t comp_len;
- /* 3rd element of header is length of compressed inline data */
- comp_len = getbe32(data + pos + 8);
- incr = v1_hdrsize + comp_len;
- if (offsets)
- offsets[len] = data + pos;
- len++;
- pos += incr;
- }
-
- if (pos != end) {
- if (!PyErr_Occurred())
- PyErr_SetString(PyExc_ValueError, "corrupt index file");
- return -1;
- }
-
- return len;
-}
-
-static int index_init(indexObject *self, PyObject *args)
-{
- PyObject *data_obj, *inlined_obj;
- Py_ssize_t size;
-
- /* Initialize before argument-checking to avoid index_dealloc() crash. */
- self->raw_length = 0;
- self->added = NULL;
- self->cache = NULL;
- self->data = NULL;
- memset(&self->buf, 0, sizeof(self->buf));
- self->headrevs = NULL;
- self->filteredrevs = Py_None;
- Py_INCREF(Py_None);
- self->nt = NULL;
- self->offsets = NULL;
-
- if (!PyArg_ParseTuple(args, "OO", &data_obj, &inlined_obj))
- return -1;
- if (!PyObject_CheckBuffer(data_obj)) {
- PyErr_SetString(PyExc_TypeError,
- "data does not support buffer interface");
- return -1;
- }
-
- if (PyObject_GetBuffer(data_obj, &self->buf, PyBUF_SIMPLE) == -1)
- return -1;
- size = self->buf.len;
-
- self->inlined = inlined_obj && PyObject_IsTrue(inlined_obj);
- self->data = data_obj;
-
- self->ntlength = self->ntcapacity = 0;
- self->ntdepth = self->ntsplits = 0;
- self->ntlookups = self->ntmisses = 0;
- self->ntrev = -1;
- Py_INCREF(self->data);
-
- if (self->inlined) {
- Py_ssize_t len = inline_scan(self, NULL);
- if (len == -1)
- goto bail;
- self->raw_length = len;
- self->length = len + 1;
- } else {
- if (size % v1_hdrsize) {
- PyErr_SetString(PyExc_ValueError, "corrupt index file");
- goto bail;
- }
- self->raw_length = size / v1_hdrsize;
- self->length = self->raw_length + 1;
- }
-
- return 0;
-bail:
- return -1;
-}
-
-static PyObject *index_nodemap(indexObject *self)
-{
- Py_INCREF(self);
- return (PyObject *)self;
-}
-
-static void index_dealloc(indexObject *self)
-{
- _index_clearcaches(self);
- Py_XDECREF(self->filteredrevs);
- if (self->buf.buf) {
- PyBuffer_Release(&self->buf);
- memset(&self->buf, 0, sizeof(self->buf));
- }
- Py_XDECREF(self->data);
- Py_XDECREF(self->added);
- PyObject_Del(self);
-}
-
-static PySequenceMethods index_sequence_methods = {
- (lenfunc)index_length, /* sq_length */
- 0, /* sq_concat */
- 0, /* sq_repeat */
- (ssizeargfunc)index_get, /* sq_item */
- 0, /* sq_slice */
- 0, /* sq_ass_item */
- 0, /* sq_ass_slice */
- (objobjproc)index_contains, /* sq_contains */
-};
-
-static PyMappingMethods index_mapping_methods = {
- (lenfunc)index_length, /* mp_length */
- (binaryfunc)index_getitem, /* mp_subscript */
- (objobjargproc)index_assign_subscript, /* mp_ass_subscript */
-};
-
-static PyMethodDef index_methods[] = {
- {"ancestors", (PyCFunction)index_ancestors, METH_VARARGS,
- "return the gca set of the given revs"},
- {"commonancestorsheads", (PyCFunction)index_commonancestorsheads,
- METH_VARARGS,
- "return the heads of the common ancestors of the given revs"},
- {"clearcaches", (PyCFunction)index_clearcaches, METH_NOARGS,
- "clear the index caches"},
- {"get", (PyCFunction)index_m_get, METH_VARARGS,
- "get an index entry"},
- {"computephasesmapsets", (PyCFunction)compute_phases_map_sets,
- METH_VARARGS, "compute phases"},
- {"reachableroots2", (PyCFunction)reachableroots2, METH_VARARGS,
- "reachableroots"},
- {"headrevs", (PyCFunction)index_headrevs, METH_VARARGS,
- "get head revisions"}, /* Can do filtering since 3.2 */
- {"headrevsfiltered", (PyCFunction)index_headrevs, METH_VARARGS,
- "get filtered head revisions"}, /* Can always do filtering */
- {"insert", (PyCFunction)index_insert, METH_VARARGS,
- "insert an index entry"},
- {"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS,
- "match a potentially ambiguous node ID"},
- {"stats", (PyCFunction)index_stats, METH_NOARGS,
- "stats for the index"},
- {NULL} /* Sentinel */
-};
-
-static PyGetSetDef index_getset[] = {
- {"nodemap", (getter)index_nodemap, NULL, "nodemap", NULL},
- {NULL} /* Sentinel */
-};
-
-static PyTypeObject indexType = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "parsers.index", /* tp_name */
- sizeof(indexObject), /* tp_basicsize */
- 0, /* tp_itemsize */
- (destructor)index_dealloc, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_compare */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- &index_sequence_methods, /* tp_as_sequence */
- &index_mapping_methods, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT, /* tp_flags */
- "revlog index", /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- index_methods, /* tp_methods */
- 0, /* tp_members */
- index_getset, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- (initproc)index_init, /* tp_init */
- 0, /* tp_alloc */
-};
-
-/*
- * returns a tuple of the form (index, index, cache) with elements as
- * follows:
- *
- * index: an index object that lazily parses RevlogNG records
- * cache: if data is inlined, a tuple (0, index_file_content), else None
- * index_file_content could be a string, or a buffer
- *
- * added complications are for backwards compatibility
- */
-static PyObject *parse_index2(PyObject *self, PyObject *args)
-{
- PyObject *tuple = NULL, *cache = NULL;
- indexObject *idx;
- int ret;
-
- idx = PyObject_New(indexObject, &indexType);
- if (idx == NULL)
- goto bail;
-
- ret = index_init(idx, args);
- if (ret == -1)
- goto bail;
-
- if (idx->inlined) {
- cache = Py_BuildValue("iO", 0, idx->data);
- if (cache == NULL)
- goto bail;
- } else {
- cache = Py_None;
- Py_INCREF(cache);
- }
-
- tuple = Py_BuildValue("NN", idx, cache);
- if (!tuple)
- goto bail;
- return tuple;
-
-bail:
- Py_XDECREF(idx);
- Py_XDECREF(cache);
- Py_XDECREF(tuple);
- return NULL;
-}
-
-#define BUMPED_FIX 1
-#define USING_SHA_256 2
-#define FM1_HEADER_SIZE (4 + 8 + 2 + 2 + 1 + 1 + 1)
-
-static PyObject *readshas(
- const char *source, unsigned char num, Py_ssize_t hashwidth)
-{
- int i;
- PyObject *list = PyTuple_New(num);
- if (list == NULL) {
- return NULL;
- }
- for (i = 0; i < num; i++) {
- PyObject *hash = PyBytes_FromStringAndSize(source, hashwidth);
- if (hash == NULL) {
- Py_DECREF(list);
- return NULL;
- }
- PyTuple_SET_ITEM(list, i, hash);
- source += hashwidth;
- }
- return list;
-}
-
-static PyObject *fm1readmarker(const char *databegin, const char *dataend,
- uint32_t *msize)
-{
- const char *data = databegin;
- const char *meta;
-
- double mtime;
- int16_t tz;
- uint16_t flags;
- unsigned char nsuccs, nparents, nmetadata;
- Py_ssize_t hashwidth = 20;
-
- PyObject *prec = NULL, *parents = NULL, *succs = NULL;
- PyObject *metadata = NULL, *ret = NULL;
- int i;
-
- if (data + FM1_HEADER_SIZE > dataend) {
- goto overflow;
- }
-
- *msize = getbe32(data);
- data += 4;
- mtime = getbefloat64(data);
- data += 8;
- tz = getbeint16(data);
- data += 2;
- flags = getbeuint16(data);
- data += 2;
-
- if (flags & USING_SHA_256) {
- hashwidth = 32;
- }
-
- nsuccs = (unsigned char)(*data++);
- nparents = (unsigned char)(*data++);
- nmetadata = (unsigned char)(*data++);
-
- if (databegin + *msize > dataend) {
- goto overflow;
- }
- dataend = databegin + *msize; /* narrow down to marker size */
-
- if (data + hashwidth > dataend) {
- goto overflow;
- }
- prec = PyBytes_FromStringAndSize(data, hashwidth);
- data += hashwidth;
- if (prec == NULL) {
- goto bail;
- }
-
- if (data + nsuccs * hashwidth > dataend) {
- goto overflow;
- }
- succs = readshas(data, nsuccs, hashwidth);
- if (succs == NULL) {
- goto bail;
- }
- data += nsuccs * hashwidth;
-
- if (nparents == 1 || nparents == 2) {
- if (data + nparents * hashwidth > dataend) {
- goto overflow;
- }
- parents = readshas(data, nparents, hashwidth);
- if (parents == NULL) {
- goto bail;
- }
- data += nparents * hashwidth;
- } else {
- parents = Py_None;
- Py_INCREF(parents);
- }
-
- if (data + 2 * nmetadata > dataend) {
- goto overflow;
- }
- meta = data + (2 * nmetadata);
- metadata = PyTuple_New(nmetadata);
- if (metadata == NULL) {
- goto bail;
- }
- for (i = 0; i < nmetadata; i++) {
- PyObject *tmp, *left = NULL, *right = NULL;
- Py_ssize_t leftsize = (unsigned char)(*data++);
- Py_ssize_t rightsize = (unsigned char)(*data++);
- if (meta + leftsize + rightsize > dataend) {
- goto overflow;
- }
- left = PyBytes_FromStringAndSize(meta, leftsize);
- meta += leftsize;
- right = PyBytes_FromStringAndSize(meta, rightsize);
- meta += rightsize;
- tmp = PyTuple_New(2);
- if (!left || !right || !tmp) {
- Py_XDECREF(left);
- Py_XDECREF(right);
- Py_XDECREF(tmp);
- goto bail;
- }
- PyTuple_SET_ITEM(tmp, 0, left);
- PyTuple_SET_ITEM(tmp, 1, right);
- PyTuple_SET_ITEM(metadata, i, tmp);
- }
- ret = Py_BuildValue("(OOHO(di)O)", prec, succs, flags,
- metadata, mtime, (int)tz * 60, parents);
- goto bail; /* return successfully */
-
-overflow:
- PyErr_SetString(PyExc_ValueError, "overflow in obsstore");
-bail:
- Py_XDECREF(prec);
- Py_XDECREF(succs);
- Py_XDECREF(metadata);
- Py_XDECREF(parents);
- return ret;
-}
-
-
-static PyObject *fm1readmarkers(PyObject *self, PyObject *args) {
- const char *data, *dataend;
- int datalen;
- Py_ssize_t offset, stop;
- PyObject *markers = NULL;
-
- if (!PyArg_ParseTuple(args, "s#nn", &data, &datalen, &offset, &stop)) {
- return NULL;
- }
- dataend = data + datalen;
- data += offset;
- markers = PyList_New(0);
- if (!markers) {
- return NULL;
- }
- while (offset < stop) {
- uint32_t msize;
- int error;
- PyObject *record = fm1readmarker(data, dataend, &msize);
- if (!record) {
- goto bail;
- }
- error = PyList_Append(markers, record);
- Py_DECREF(record);
- if (error) {
- goto bail;
- }
- data += msize;
- offset += msize;
- }
- return markers;
-bail:
- Py_DECREF(markers);
- return NULL;
-}
-
-static char parsers_doc[] = "Efficient content parsing.";
-
-PyObject *encodedir(PyObject *self, PyObject *args);
-PyObject *pathencode(PyObject *self, PyObject *args);
-PyObject *lowerencode(PyObject *self, PyObject *args);
-
-static PyMethodDef methods[] = {
- {"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"},
- {"nonnormalotherparententries", nonnormalotherparententries, METH_VARARGS,
- "create a set containing non-normal and other parent entries of given "
- "dirstate\n"},
- {"parse_manifest", parse_manifest, METH_VARARGS, "parse a manifest\n"},
- {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
- {"parse_index2", parse_index2, METH_VARARGS, "parse a revlog index\n"},
- {"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"},
- {"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"},
- {"dict_new_presized", dict_new_presized, METH_VARARGS,
- "construct a dict with an expected size\n"},
- {"make_file_foldmap", make_file_foldmap, METH_VARARGS,
- "make file foldmap\n"},
- {"encodedir", encodedir, METH_VARARGS, "encodedir a path\n"},
- {"pathencode", pathencode, METH_VARARGS, "fncache-encode a path\n"},
- {"lowerencode", lowerencode, METH_VARARGS, "lower-encode a path\n"},
- {"fm1readmarkers", fm1readmarkers, METH_VARARGS,
- "parse v1 obsolete markers\n"},
- {NULL, NULL}
-};
-
-void dirs_module_init(PyObject *mod);
-void manifest_module_init(PyObject *mod);
-
-static void module_init(PyObject *mod)
-{
- /* This module constant has two purposes. First, it lets us unit test
- * the ImportError raised without hard-coding any error text. This
- * means we can change the text in the future without breaking tests,
- * even across changesets without a recompile. Second, its presence
- * can be used to determine whether the version-checking logic is
- * present, which also helps in testing across changesets without a
- * recompile. Note that this means the pure-Python version of parsers
- * should not have this module constant. */
- PyModule_AddStringConstant(mod, "versionerrortext", versionerrortext);
-
- dirs_module_init(mod);
- manifest_module_init(mod);
-
- indexType.tp_new = PyType_GenericNew;
- if (PyType_Ready(&indexType) < 0 ||
- PyType_Ready(&dirstateTupleType) < 0)
- return;
- Py_INCREF(&indexType);
- PyModule_AddObject(mod, "index", (PyObject *)&indexType);
- Py_INCREF(&dirstateTupleType);
- PyModule_AddObject(mod, "dirstatetuple",
- (PyObject *)&dirstateTupleType);
-
- nullentry = Py_BuildValue("iiiiiiis#", 0, 0, 0,
- -1, -1, -1, -1, nullid, 20);
- if (nullentry)
- PyObject_GC_UnTrack(nullentry);
-}
-
-static int check_python_version(void)
-{
- PyObject *sys = PyImport_ImportModule("sys"), *ver;
- long hexversion;
- if (!sys)
- return -1;
- ver = PyObject_GetAttrString(sys, "hexversion");
- Py_DECREF(sys);
- if (!ver)
- return -1;
- hexversion = PyInt_AsLong(ver);
- Py_DECREF(ver);
- /* sys.hexversion is a 32-bit number by default, so the -1 case
- * should only occur in unusual circumstances (e.g. if sys.hexversion
- * is manually set to an invalid value). */
- if ((hexversion == -1) || (hexversion >> 16 != PY_VERSION_HEX >> 16)) {
- PyErr_Format(PyExc_ImportError, "%s: The Mercurial extension "
- "modules were compiled with Python " PY_VERSION ", but "
- "Mercurial is currently using Python with sys.hexversion=%ld: "
- "Python %s\n at: %s", versionerrortext, hexversion,
- Py_GetVersion(), Py_GetProgramFullPath());
- return -1;
- }
- return 0;
-}
-
-#ifdef IS_PY3K
-static struct PyModuleDef parsers_module = {
- PyModuleDef_HEAD_INIT,
- "parsers",
- parsers_doc,
- -1,
- methods
-};
-
-PyMODINIT_FUNC PyInit_parsers(void)
-{
- PyObject *mod;
-
- if (check_python_version() == -1)
- return NULL;
- mod = PyModule_Create(&parsers_module);
- module_init(mod);
- return mod;
-}
-#else
-PyMODINIT_FUNC initparsers(void)
-{
- PyObject *mod;
-
- if (check_python_version() == -1)
- return;
- mod = Py_InitModule3("parsers", methods, parsers_doc);
- module_init(mod);
-}
-#endif
--- a/mercurial/patch.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/patch.py Tue Jun 20 16:33:46 2017 -0400
@@ -26,20 +26,21 @@
short,
)
from . import (
- base85,
copies,
- diffhelpers,
encoding,
error,
mail,
mdiff,
pathutil,
+ policy,
pycompat,
scmutil,
similar,
util,
vfs as vfsmod,
)
+
+diffhelpers = policy.importmod(r'diffhelpers')
stringio = util.stringio
gitre = re.compile(br'diff --git a/(.*) b/(.*)')
@@ -1430,7 +1431,7 @@
else:
l = ord(l) - ord('a') + 27
try:
- dec.append(base85.b85decode(line[1:])[:l])
+ dec.append(util.b85decode(line[1:])[:l])
except ValueError as e:
raise PatchError(_('could not decode "%s" binary patch: %s')
% (self._fname, str(e)))
@@ -2508,6 +2509,9 @@
revinfo = ' '.join(["-r %s" % rev for rev in revs])
return 'diff %s %s' % (revinfo, f)
+ def isempty(fctx):
+ return fctx is None or fctx.size() == 0
+
date1 = util.datestr(ctx1.date())
date2 = util.datestr(ctx2.date())
@@ -2523,28 +2527,30 @@
for f1, f2, copyop in _filepairs(modified, added, removed, copy, opts):
content1 = None
content2 = None
+ fctx1 = None
+ fctx2 = None
flag1 = None
flag2 = None
if f1:
- content1 = getfilectx(f1, ctx1).data()
+ fctx1 = getfilectx(f1, ctx1)
if opts.git or losedatafn:
flag1 = ctx1.flags(f1)
if f2:
- content2 = getfilectx(f2, ctx2).data()
+ fctx2 = getfilectx(f2, ctx2)
if opts.git or losedatafn:
flag2 = ctx2.flags(f2)
- binary = False
- if opts.git or losedatafn:
- binary = util.binary(content1) or util.binary(content2)
+ # if binary is True, output "summary" or "base85", but not "text diff"
+ binary = not opts.text and any(f.isbinary()
+ for f in [fctx1, fctx2] if f is not None)
if losedatafn and not opts.git:
if (binary or
# copy/rename
f2 in copy or
# empty file creation
- (not f1 and not content2) or
+ (not f1 and isempty(fctx2)) or
# empty file deletion
- (not content1 and not f2) or
+ (isempty(fctx1) and not f2) or
# create with flags
(not f1 and flag2) or
# change flags
@@ -2577,7 +2583,37 @@
elif revs and not repo.ui.quiet:
header.append(diffline(path1, revs))
- if binary and opts.git and not opts.nobinary and not opts.text:
+ # fctx.is | diffopts | what to | is fctx.data()
+ # binary() | text nobinary git index | output? | outputted?
+ # ------------------------------------|----------------------------
+ # yes | no no no * | summary | no
+ # yes | no no yes * | base85 | yes
+ # yes | no yes no * | summary | no
+ # yes | no yes yes 0 | summary | no
+ # yes | no yes yes >0 | summary | semi [1]
+ # yes | yes * * * | text diff | yes
+ # no | * * * * | text diff | yes
+ # [1]: hash(fctx.data()) is outputted. so fctx.data() cannot be faked
+ if binary and (not opts.git or (opts.git and opts.nobinary and not
+ opts.index)):
+ # fast path: no binary content will be displayed, content1 and
+ # content2 are only used for equivalent test. cmp() could have a
+ # fast path.
+ if fctx1 is not None:
+ content1 = b'\0'
+ if fctx2 is not None:
+ if fctx1 is not None and not fctx1.cmp(fctx2):
+ content2 = b'\0' # not different
+ else:
+ content2 = b'\0\0'
+ else:
+ # normal path: load contents
+ if fctx1 is not None:
+ content1 = fctx1.data()
+ if fctx2 is not None:
+ content2 = fctx2.data()
+
+ if binary and opts.git and not opts.nobinary:
text = mdiff.b85diff(content1, content2)
if text:
header.append('index %s..%s' %
@@ -2620,19 +2656,28 @@
if filename:
results.append((filename, adds, removes, isbinary))
+ # inheader is used to track if a line is in the
+ # header portion of the diff. This helps properly account
+ # for lines that start with '--' or '++'
+ inheader = False
+
for line in lines:
if line.startswith('diff'):
addresult()
- # set numbers to 0 anyway when starting new file
+ # starting a new file diff
+ # set numbers to 0 and reset inheader
+ inheader = True
adds, removes, isbinary = 0, 0, False
if line.startswith('diff --git a/'):
filename = gitre.search(line).group(2)
elif line.startswith('diff -r'):
# format: "diff -r ... -r ... filename"
filename = diffre.search(line).group(1)
- elif line.startswith('+') and not line.startswith('+++ '):
+ elif line.startswith('@@'):
+ inheader = False
+ elif line.startswith('+') and not inheader:
adds += 1
- elif line.startswith('-') and not line.startswith('--- '):
+ elif line.startswith('-') and not inheader:
removes += 1
elif (line.startswith('GIT binary patch') or
line.startswith('Binary file')):
--- a/mercurial/pathencode.c Tue Jun 13 22:24:41 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,765 +0,0 @@
-/*
- pathencode.c - efficient path name encoding
-
- Copyright 2012 Facebook
-
- This software may be used and distributed according to the terms of
- the GNU General Public License, incorporated herein by reference.
-*/
-
-/*
- * An implementation of the name encoding scheme used by the fncache
- * store. The common case is of a path < 120 bytes long, which is
- * handled either in a single pass with no allocations or two passes
- * with a single allocation. For longer paths, multiple passes are
- * required.
- */
-
-#define PY_SSIZE_T_CLEAN
-#include <Python.h>
-#include <assert.h>
-#include <ctype.h>
-#include <stdlib.h>
-#include <string.h>
-
-#include "util.h"
-
-/* state machine for the fast path */
-enum path_state {
- START, /* first byte of a path component */
- A, /* "AUX" */
- AU,
- THIRD, /* third of a 3-byte sequence, e.g. "AUX", "NUL" */
- C, /* "CON" or "COMn" */
- CO,
- COMLPT, /* "COM" or "LPT" */
- COMLPTn,
- L,
- LP,
- N,
- NU,
- P, /* "PRN" */
- PR,
- LDOT, /* leading '.' */
- DOT, /* '.' in a non-leading position */
- H, /* ".h" */
- HGDI, /* ".hg", ".d", or ".i" */
- SPACE,
- DEFAULT /* byte of a path component after the first */
-};
-
-/* state machine for dir-encoding */
-enum dir_state {
- DDOT,
- DH,
- DHGDI,
- DDEFAULT
-};
-
-static inline int inset(const uint32_t bitset[], char c)
-{
- return bitset[((uint8_t)c) >> 5] & (1 << (((uint8_t)c) & 31));
-}
-
-static inline void charcopy(char *dest, Py_ssize_t *destlen, size_t destsize,
- char c)
-{
- if (dest) {
- assert(*destlen < destsize);
- dest[*destlen] = c;
- }
- (*destlen)++;
-}
-
-static inline void memcopy(char *dest, Py_ssize_t *destlen, size_t destsize,
- const void *src, Py_ssize_t len)
-{
- if (dest) {
- assert(*destlen + len < destsize);
- memcpy((void *)&dest[*destlen], src, len);
- }
- *destlen += len;
-}
-
-static inline void hexencode(char *dest, Py_ssize_t *destlen, size_t destsize,
- uint8_t c)
-{
- static const char hexdigit[] = "0123456789abcdef";
-
- charcopy(dest, destlen, destsize, hexdigit[c >> 4]);
- charcopy(dest, destlen, destsize, hexdigit[c & 15]);
-}
-
-/* 3-byte escape: tilde followed by two hex digits */
-static inline void escape3(char *dest, Py_ssize_t *destlen, size_t destsize,
- char c)
-{
- charcopy(dest, destlen, destsize, '~');
- hexencode(dest, destlen, destsize, c);
-}
-
-static Py_ssize_t _encodedir(char *dest, size_t destsize,
- const char *src, Py_ssize_t len)
-{
- enum dir_state state = DDEFAULT;
- Py_ssize_t i = 0, destlen = 0;
-
- while (i < len) {
- switch (state) {
- case DDOT:
- switch (src[i]) {
- case 'd':
- case 'i':
- state = DHGDI;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- case 'h':
- state = DH;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- default:
- state = DDEFAULT;
- break;
- }
- break;
- case DH:
- if (src[i] == 'g') {
- state = DHGDI;
- charcopy(dest, &destlen, destsize, src[i++]);
- }
- else state = DDEFAULT;
- break;
- case DHGDI:
- if (src[i] == '/') {
- memcopy(dest, &destlen, destsize, ".hg", 3);
- charcopy(dest, &destlen, destsize, src[i++]);
- }
- state = DDEFAULT;
- break;
- case DDEFAULT:
- if (src[i] == '.')
- state = DDOT;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- }
- }
-
- return destlen;
-}
-
-PyObject *encodedir(PyObject *self, PyObject *args)
-{
- Py_ssize_t len, newlen;
- PyObject *pathobj, *newobj;
- char *path;
-
- if (!PyArg_ParseTuple(args, "O:encodedir", &pathobj))
- return NULL;
-
- if (PyBytes_AsStringAndSize(pathobj, &path, &len) == -1) {
- PyErr_SetString(PyExc_TypeError, "expected a string");
- return NULL;
- }
-
- newlen = len ? _encodedir(NULL, 0, path, len + 1) : 1;
-
- if (newlen == len + 1) {
- Py_INCREF(pathobj);
- return pathobj;
- }
-
- newobj = PyBytes_FromStringAndSize(NULL, newlen);
-
- if (newobj) {
- assert(PyBytes_Check(newobj));
- Py_SIZE(newobj)--;
- _encodedir(PyBytes_AS_STRING(newobj), newlen, path,
- len + 1);
- }
-
- return newobj;
-}
-
-static Py_ssize_t _encode(const uint32_t twobytes[8], const uint32_t onebyte[8],
- char *dest, Py_ssize_t destlen, size_t destsize,
- const char *src, Py_ssize_t len,
- int encodedir)
-{
- enum path_state state = START;
- Py_ssize_t i = 0;
-
- /*
- * Python strings end with a zero byte, which we use as a
- * terminal token as they are not valid inside path names.
- */
-
- while (i < len) {
- switch (state) {
- case START:
- switch (src[i]) {
- case '/':
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- case '.':
- state = LDOT;
- escape3(dest, &destlen, destsize, src[i++]);
- break;
- case ' ':
- state = DEFAULT;
- escape3(dest, &destlen, destsize, src[i++]);
- break;
- case 'a':
- state = A;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- case 'c':
- state = C;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- case 'l':
- state = L;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- case 'n':
- state = N;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- case 'p':
- state = P;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- default:
- state = DEFAULT;
- break;
- }
- break;
- case A:
- if (src[i] == 'u') {
- state = AU;
- charcopy(dest, &destlen, destsize, src[i++]);
- }
- else state = DEFAULT;
- break;
- case AU:
- if (src[i] == 'x') {
- state = THIRD;
- i++;
- }
- else state = DEFAULT;
- break;
- case THIRD:
- state = DEFAULT;
- switch (src[i]) {
- case '.':
- case '/':
- case '\0':
- escape3(dest, &destlen, destsize, src[i - 1]);
- break;
- default:
- i--;
- break;
- }
- break;
- case C:
- if (src[i] == 'o') {
- state = CO;
- charcopy(dest, &destlen, destsize, src[i++]);
- }
- else state = DEFAULT;
- break;
- case CO:
- if (src[i] == 'm') {
- state = COMLPT;
- i++;
- }
- else if (src[i] == 'n') {
- state = THIRD;
- i++;
- }
- else state = DEFAULT;
- break;
- case COMLPT:
- switch (src[i]) {
- case '1': case '2': case '3': case '4': case '5':
- case '6': case '7': case '8': case '9':
- state = COMLPTn;
- i++;
- break;
- default:
- state = DEFAULT;
- charcopy(dest, &destlen, destsize, src[i - 1]);
- break;
- }
- break;
- case COMLPTn:
- state = DEFAULT;
- switch (src[i]) {
- case '.':
- case '/':
- case '\0':
- escape3(dest, &destlen, destsize, src[i - 2]);
- charcopy(dest, &destlen, destsize, src[i - 1]);
- break;
- default:
- memcopy(dest, &destlen, destsize,
- &src[i - 2], 2);
- break;
- }
- break;
- case L:
- if (src[i] == 'p') {
- state = LP;
- charcopy(dest, &destlen, destsize, src[i++]);
- }
- else state = DEFAULT;
- break;
- case LP:
- if (src[i] == 't') {
- state = COMLPT;
- i++;
- }
- else state = DEFAULT;
- break;
- case N:
- if (src[i] == 'u') {
- state = NU;
- charcopy(dest, &destlen, destsize, src[i++]);
- }
- else state = DEFAULT;
- break;
- case NU:
- if (src[i] == 'l') {
- state = THIRD;
- i++;
- }
- else state = DEFAULT;
- break;
- case P:
- if (src[i] == 'r') {
- state = PR;
- charcopy(dest, &destlen, destsize, src[i++]);
- }
- else state = DEFAULT;
- break;
- case PR:
- if (src[i] == 'n') {
- state = THIRD;
- i++;
- }
- else state = DEFAULT;
- break;
- case LDOT:
- switch (src[i]) {
- case 'd':
- case 'i':
- state = HGDI;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- case 'h':
- state = H;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- default:
- state = DEFAULT;
- break;
- }
- break;
- case DOT:
- switch (src[i]) {
- case '/':
- case '\0':
- state = START;
- memcopy(dest, &destlen, destsize, "~2e", 3);
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- case 'd':
- case 'i':
- state = HGDI;
- charcopy(dest, &destlen, destsize, '.');
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- case 'h':
- state = H;
- memcopy(dest, &destlen, destsize, ".h", 2);
- i++;
- break;
- default:
- state = DEFAULT;
- charcopy(dest, &destlen, destsize, '.');
- break;
- }
- break;
- case H:
- if (src[i] == 'g') {
- state = HGDI;
- charcopy(dest, &destlen, destsize, src[i++]);
- }
- else state = DEFAULT;
- break;
- case HGDI:
- if (src[i] == '/') {
- state = START;
- if (encodedir)
- memcopy(dest, &destlen, destsize, ".hg",
- 3);
- charcopy(dest, &destlen, destsize, src[i++]);
- }
- else state = DEFAULT;
- break;
- case SPACE:
- switch (src[i]) {
- case '/':
- case '\0':
- state = START;
- memcopy(dest, &destlen, destsize, "~20", 3);
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- default:
- state = DEFAULT;
- charcopy(dest, &destlen, destsize, ' ');
- break;
- }
- break;
- case DEFAULT:
- while (inset(onebyte, src[i])) {
- charcopy(dest, &destlen, destsize, src[i++]);
- if (i == len)
- goto done;
- }
- switch (src[i]) {
- case '.':
- state = DOT;
- i++;
- break;
- case ' ':
- state = SPACE;
- i++;
- break;
- case '/':
- state = START;
- charcopy(dest, &destlen, destsize, '/');
- i++;
- break;
- default:
- if (inset(onebyte, src[i])) {
- do {
- charcopy(dest, &destlen,
- destsize, src[i++]);
- } while (i < len &&
- inset(onebyte, src[i]));
- }
- else if (inset(twobytes, src[i])) {
- char c = src[i++];
- charcopy(dest, &destlen, destsize, '_');
- charcopy(dest, &destlen, destsize,
- c == '_' ? '_' : c + 32);
- }
- else
- escape3(dest, &destlen, destsize,
- src[i++]);
- break;
- }
- break;
- }
- }
-done:
- return destlen;
-}
-
-static Py_ssize_t basicencode(char *dest, size_t destsize,
- const char *src, Py_ssize_t len)
-{
- static const uint32_t twobytes[8] = { 0, 0, 0x87fffffe };
-
- static const uint32_t onebyte[8] = {
- 1, 0x2bff3bfa, 0x68000001, 0x2fffffff,
- };
-
- Py_ssize_t destlen = 0;
-
- return _encode(twobytes, onebyte, dest, destlen, destsize,
- src, len, 1);
-}
-
-static const Py_ssize_t maxstorepathlen = 120;
-
-static Py_ssize_t _lowerencode(char *dest, size_t destsize,
- const char *src, Py_ssize_t len)
-{
- static const uint32_t onebyte[8] = {
- 1, 0x2bfffbfb, 0xe8000001, 0x2fffffff
- };
-
- static const uint32_t lower[8] = { 0, 0, 0x7fffffe };
-
- Py_ssize_t i, destlen = 0;
-
- for (i = 0; i < len; i++) {
- if (inset(onebyte, src[i]))
- charcopy(dest, &destlen, destsize, src[i]);
- else if (inset(lower, src[i]))
- charcopy(dest, &destlen, destsize, src[i] + 32);
- else
- escape3(dest, &destlen, destsize, src[i]);
- }
-
- return destlen;
-}
-
-PyObject *lowerencode(PyObject *self, PyObject *args)
-{
- char *path;
- Py_ssize_t len, newlen;
- PyObject *ret;
-
- if (!PyArg_ParseTuple(args, "s#:lowerencode", &path, &len))
- return NULL;
-
- newlen = _lowerencode(NULL, 0, path, len);
- ret = PyBytes_FromStringAndSize(NULL, newlen);
- if (ret)
- _lowerencode(PyBytes_AS_STRING(ret), newlen, path, len);
-
- return ret;
-}
-
-/* See store.py:_auxencode for a description. */
-static Py_ssize_t auxencode(char *dest, size_t destsize,
- const char *src, Py_ssize_t len)
-{
- static const uint32_t twobytes[8];
-
- static const uint32_t onebyte[8] = {
- ~0U, 0xffff3ffe, ~0U, ~0U, ~0U, ~0U, ~0U, ~0U,
- };
-
- return _encode(twobytes, onebyte, dest, 0, destsize, src, len, 0);
-}
-
-static PyObject *hashmangle(const char *src, Py_ssize_t len, const char sha[20])
-{
- static const Py_ssize_t dirprefixlen = 8;
- static const Py_ssize_t maxshortdirslen = 68;
- char *dest;
- PyObject *ret;
-
- Py_ssize_t i, d, p, lastslash = len - 1, lastdot = -1;
- Py_ssize_t destsize, destlen = 0, slop, used;
-
- while (lastslash >= 0 && src[lastslash] != '/') {
- if (src[lastslash] == '.' && lastdot == -1)
- lastdot = lastslash;
- lastslash--;
- }
-
-#if 0
- /* All paths should end in a suffix of ".i" or ".d".
- Unfortunately, the file names in test-hybridencode.py
- violate this rule. */
- if (lastdot != len - 3) {
- PyErr_SetString(PyExc_ValueError,
- "suffix missing or wrong length");
- return NULL;
- }
-#endif
-
- /* If src contains a suffix, we will append it to the end of
- the new string, so make room. */
- destsize = 120;
- if (lastdot >= 0)
- destsize += len - lastdot - 1;
-
- ret = PyBytes_FromStringAndSize(NULL, destsize);
- if (ret == NULL)
- return NULL;
-
- dest = PyBytes_AS_STRING(ret);
- memcopy(dest, &destlen, destsize, "dh/", 3);
-
- /* Copy up to dirprefixlen bytes of each path component, up to
- a limit of maxshortdirslen bytes. */
- for (i = d = p = 0; i < lastslash; i++, p++) {
- if (src[i] == '/') {
- char d = dest[destlen - 1];
- /* After truncation, a directory name may end
- in a space or dot, which are unportable. */
- if (d == '.' || d == ' ')
- dest[destlen - 1] = '_';
- /* The + 3 is to account for "dh/" in the beginning */
- if (destlen > maxshortdirslen + 3)
- break;
- charcopy(dest, &destlen, destsize, src[i]);
- p = -1;
- }
- else if (p < dirprefixlen)
- charcopy(dest, &destlen, destsize, src[i]);
- }
-
- /* Rewind to just before the last slash copied. */
- if (destlen > maxshortdirslen + 3)
- do {
- destlen--;
- } while (destlen > 0 && dest[destlen] != '/');
-
- if (destlen > 3) {
- if (lastslash > 0) {
- char d = dest[destlen - 1];
- /* The last directory component may be
- truncated, so make it safe. */
- if (d == '.' || d == ' ')
- dest[destlen - 1] = '_';
- }
-
- charcopy(dest, &destlen, destsize, '/');
- }
-
- /* Add a prefix of the original file's name. Its length
- depends on the number of bytes left after accounting for
- hash and suffix. */
- used = destlen + 40;
- if (lastdot >= 0)
- used += len - lastdot - 1;
- slop = maxstorepathlen - used;
- if (slop > 0) {
- Py_ssize_t basenamelen =
- lastslash >= 0 ? len - lastslash - 2 : len - 1;
-
- if (basenamelen > slop)
- basenamelen = slop;
- if (basenamelen > 0)
- memcopy(dest, &destlen, destsize, &src[lastslash + 1],
- basenamelen);
- }
-
- /* Add hash and suffix. */
- for (i = 0; i < 20; i++)
- hexencode(dest, &destlen, destsize, sha[i]);
-
- if (lastdot >= 0)
- memcopy(dest, &destlen, destsize, &src[lastdot],
- len - lastdot - 1);
-
- assert(PyBytes_Check(ret));
- Py_SIZE(ret) = destlen;
-
- return ret;
-}
-
-/*
- * Avoiding a trip through Python would improve performance by 50%,
- * but we don't encounter enough long names to be worth the code.
- */
-static int sha1hash(char hash[20], const char *str, Py_ssize_t len)
-{
- static PyObject *shafunc;
- PyObject *shaobj, *hashobj;
-
- if (shafunc == NULL) {
- PyObject *hashlib, *name = PyBytes_FromString("hashlib");
-
- if (name == NULL)
- return -1;
-
- hashlib = PyImport_Import(name);
- Py_DECREF(name);
-
- if (hashlib == NULL) {
- PyErr_SetString(PyExc_ImportError, "hashlib");
- return -1;
- }
- shafunc = PyObject_GetAttrString(hashlib, "sha1");
- Py_DECREF(hashlib);
-
- if (shafunc == NULL) {
- PyErr_SetString(PyExc_AttributeError,
- "module 'hashlib' has no "
- "attribute 'sha1'");
- return -1;
- }
- }
-
- shaobj = PyObject_CallFunction(shafunc, "s#", str, len);
-
- if (shaobj == NULL)
- return -1;
-
- hashobj = PyObject_CallMethod(shaobj, "digest", "");
- Py_DECREF(shaobj);
- if (hashobj == NULL)
- return -1;
-
- if (!PyBytes_Check(hashobj) || PyBytes_GET_SIZE(hashobj) != 20) {
- PyErr_SetString(PyExc_TypeError,
- "result of digest is not a 20-byte hash");
- Py_DECREF(hashobj);
- return -1;
- }
-
- memcpy(hash, PyBytes_AS_STRING(hashobj), 20);
- Py_DECREF(hashobj);
- return 0;
-}
-
-#define MAXENCODE 4096 * 4
-
-static PyObject *hashencode(const char *src, Py_ssize_t len)
-{
- char dired[MAXENCODE];
- char lowered[MAXENCODE];
- char auxed[MAXENCODE];
- Py_ssize_t dirlen, lowerlen, auxlen, baselen;
- char sha[20];
-
- baselen = (len - 5) * 3;
- if (baselen >= MAXENCODE) {
- PyErr_SetString(PyExc_ValueError, "string too long");
- return NULL;
- }
-
- dirlen = _encodedir(dired, baselen, src, len);
- if (sha1hash(sha, dired, dirlen - 1) == -1)
- return NULL;
- lowerlen = _lowerencode(lowered, baselen, dired + 5, dirlen - 5);
- auxlen = auxencode(auxed, baselen, lowered, lowerlen);
- return hashmangle(auxed, auxlen, sha);
-}
-
-PyObject *pathencode(PyObject *self, PyObject *args)
-{
- Py_ssize_t len, newlen;
- PyObject *pathobj, *newobj;
- char *path;
-
- if (!PyArg_ParseTuple(args, "O:pathencode", &pathobj))
- return NULL;
-
- if (PyBytes_AsStringAndSize(pathobj, &path, &len) == -1) {
- PyErr_SetString(PyExc_TypeError, "expected a string");
- return NULL;
- }
-
- if (len > maxstorepathlen)
- newlen = maxstorepathlen + 2;
- else
- newlen = len ? basicencode(NULL, 0, path, len + 1) : 1;
-
- if (newlen <= maxstorepathlen + 1) {
- if (newlen == len + 1) {
- Py_INCREF(pathobj);
- return pathobj;
- }
-
- newobj = PyBytes_FromStringAndSize(NULL, newlen);
-
- if (newobj) {
- assert(PyBytes_Check(newobj));
- Py_SIZE(newobj)--;
- basicencode(PyBytes_AS_STRING(newobj), newlen, path,
- len + 1);
- }
- }
- else
- newobj = hashencode(path, len + 1);
-
- return newobj;
-}
--- a/mercurial/phases.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/phases.py Tue Jun 20 16:33:46 2017 -0400
@@ -423,12 +423,12 @@
if currentphase == oldphase and newphase < oldphase:
with repo.transaction('pushkey-phase') as tr:
advanceboundary(repo, tr, newphase, [bin(nhex)])
- return 1
+ return True
elif currentphase == newphase:
# raced, but got correct result
- return 1
+ return True
else:
- return 0
+ return False
def analyzeremotephases(repo, subset, roots):
"""Compute phases heads and root in a subset of node from root dict
--- a/mercurial/policy.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/policy.py Tue Jun 20 16:33:46 2017 -0400
@@ -18,10 +18,18 @@
# cffi-allow - allow pure Python implementation if cffi version is missing
# py - only load pure Python modules
#
-# By default, require the C extensions for performance reasons.
-policy = b'c'
-policynoc = (b'cffi', b'cffi-allow', b'py')
-policynocffi = (b'c', b'py')
+# By default, fall back to the pure modules so the in-place build can
+# run without recompiling the C extensions. This will be overridden by
+# __modulepolicy__ generated by setup.py.
+policy = b'allow'
+_packageprefs = {
+ # policy: (versioned package, pure package)
+ b'c': (r'cext', None),
+ b'allow': (r'cext', r'pure'),
+ b'cffi': (r'cffi', None),
+ b'cffi-allow': (r'cffi', r'pure'),
+ b'py': (None, r'pure'),
+}
try:
from . import __modulepolicy__
@@ -33,8 +41,8 @@
#
# The canonical way to do this is to test platform.python_implementation().
# But we don't import platform and don't bloat for it here.
-if '__pypy__' in sys.builtin_module_names:
- policy = 'cffi'
+if r'__pypy__' in sys.builtin_module_names:
+ policy = b'cffi'
# Our C extensions aren't yet compatible with Python 3. So use pure Python
# on Python 3 for now.
@@ -43,7 +51,54 @@
# Environment variable can always force settings.
if sys.version_info[0] >= 3:
- if 'HGMODULEPOLICY' in os.environ:
- policy = os.environ['HGMODULEPOLICY'].encode('utf-8')
+ if r'HGMODULEPOLICY' in os.environ:
+ policy = os.environ[r'HGMODULEPOLICY'].encode(r'utf-8')
else:
- policy = os.environ.get('HGMODULEPOLICY', policy)
+ policy = os.environ.get(r'HGMODULEPOLICY', policy)
+
+def _importfrom(pkgname, modname):
+ # from .<pkgname> import <modname> (where . is looked through this module)
+ fakelocals = {}
+ pkg = __import__(pkgname, globals(), fakelocals, [modname], level=1)
+ try:
+ fakelocals[modname] = mod = getattr(pkg, modname)
+ except AttributeError:
+ raise ImportError(r'cannot import name %s' % modname)
+ # force import; fakelocals[modname] may be replaced with the real module
+ getattr(mod, r'__doc__', None)
+ return fakelocals[modname]
+
+# keep in sync with "version" in C modules
+_cextversions = {
+ (r'cext', r'base85'): 1,
+ (r'cext', r'bdiff'): 1,
+ (r'cext', r'diffhelpers'): 1,
+ (r'cext', r'mpatch'): 1,
+ (r'cext', r'osutil'): 1,
+ (r'cext', r'parsers'): 1,
+}
+
+def _checkmod(pkgname, modname, mod):
+ expected = _cextversions.get((pkgname, modname))
+ actual = getattr(mod, r'version', None)
+ if actual != expected:
+ raise ImportError(r'cannot import module %s.%s '
+ r'(expected version: %d, actual: %r)'
+ % (pkgname, modname, expected, actual))
+
+def importmod(modname):
+ """Import module according to policy and check API version"""
+ try:
+ verpkg, purepkg = _packageprefs[policy]
+ except KeyError:
+ raise ImportError(r'invalid HGMODULEPOLICY %r' % policy)
+ assert verpkg or purepkg
+ if verpkg:
+ try:
+ mod = _importfrom(verpkg, modname)
+ _checkmod(verpkg, modname, mod)
+ return mod
+ except ImportError:
+ if not purepkg:
+ raise
+ return _importfrom(purepkg, modname)
--- a/mercurial/posix.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/posix.py Tue Jun 20 16:33:46 2017 -0400
@@ -98,7 +98,8 @@
return (os.lstat(f).st_mode & 0o100 != 0)
def setflags(f, l, x):
- s = os.lstat(f).st_mode
+ st = os.lstat(f)
+ s = st.st_mode
if l:
if not stat.S_ISLNK(s):
# switch file to link
@@ -125,6 +126,14 @@
s = 0o666 & ~umask # avoid restatting for chmod
sx = s & 0o100
+ if st.st_nlink > 1 and bool(x) != bool(sx):
+ # the file is a hardlink, break it
+ with open(f, "rb") as fp:
+ data = fp.read()
+ unlink(f)
+ with open(f, "wb") as fp:
+ fp.write(data)
+
if x and not sx:
# Turn on +x for every +r bit when making a file executable
# and obey umask.
@@ -244,7 +253,17 @@
# create a fixed file to link to; doesn't matter if it
# already exists.
target = 'checklink-target'
- open(os.path.join(cachedir, target), 'w').close()
+ try:
+ open(os.path.join(cachedir, target), 'w').close()
+ except IOError as inst:
+ if inst[0] == errno.EACCES:
+ # If we can't write to cachedir, just pretend
+ # that the fs is readonly and by association
+ # that the fs won't support symlinks. This
+ # seems like the least dangerous way to avoid
+ # data loss.
+ return False
+ raise
try:
os.symlink(target, name)
if cachedir is None:
@@ -474,7 +493,7 @@
def setsignalhandler():
pass
-_wantedkinds = set([stat.S_IFREG, stat.S_IFLNK])
+_wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
def statfiles(files):
'''Stat each file in files. Yield each stat, or None if a file does not
@@ -494,7 +513,7 @@
def getuser():
'''return name of current user'''
- return getpass.getuser()
+ return pycompat.fsencode(getpass.getuser())
def username(uid=None):
"""Return the name of the user with the given uid.
--- a/mercurial/profiling.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/profiling.py Tue Jun 20 16:33:46 2017 -0400
@@ -13,9 +13,21 @@
from . import (
encoding,
error,
+ extensions,
util,
)
+def _loadprofiler(ui, profiler):
+ """load profiler extension. return profile method, or None on failure"""
+ extname = profiler
+ extensions.loadall(ui, whitelist=[extname])
+ try:
+ mod = extensions.find(extname)
+ except KeyError:
+ return None
+ else:
+ return getattr(mod, 'profile', None)
+
@contextlib.contextmanager
def lsprofile(ui, fp):
format = ui.config('profiling', 'format', default='text')
@@ -126,67 +138,98 @@
showmin = ui.configwith(fraction, 'profiling', 'showmin', 0.005)
showmax = ui.configwith(fraction, 'profiling', 'showmax', 0.999)
kwargs.update(minthreshold=showmin, maxthreshold=showmax)
+ elif profformat == 'hotpath':
+ limit = ui.configwith(fraction, 'profiling', 'showmin', 0.05)
+ kwargs['limit'] = limit
statprof.display(fp, data=data, format=displayformat, **kwargs)
-@contextlib.contextmanager
-def profile(ui):
+class profile(object):
"""Start profiling.
Profiling is active when the context manager is active. When the context
manager exits, profiling results will be written to the configured output.
"""
- profiler = encoding.environ.get('HGPROF')
- if profiler is None:
- profiler = ui.config('profiling', 'type', default='stat')
- if profiler not in ('ls', 'stat', 'flame'):
- ui.warn(_("unrecognized profiler '%s' - ignored\n") % profiler)
- profiler = 'stat'
+ def __init__(self, ui, enabled=True):
+ self._ui = ui
+ self._output = None
+ self._fp = None
+ self._fpdoclose = True
+ self._profiler = None
+ self._enabled = enabled
+ self._entered = False
+ self._started = False
- output = ui.config('profiling', 'output')
+ def __enter__(self):
+ self._entered = True
+ if self._enabled:
+ self.start()
+ return self
+
+ def start(self):
+ """Start profiling.
+
+ The profiling will stop at the context exit.
- if output == 'blackbox':
- fp = util.stringio()
- elif output:
- path = ui.expandpath(output)
- fp = open(path, 'wb')
- else:
- fp = ui.ferr
+ If the profiler was already started, this has no effect."""
+ if not self._entered:
+ raise error.ProgrammingError()
+ if self._started:
+ return
+ self._started = True
+ profiler = encoding.environ.get('HGPROF')
+ proffn = None
+ if profiler is None:
+ profiler = self._ui.config('profiling', 'type', default='stat')
+ if profiler not in ('ls', 'stat', 'flame'):
+ # try to load profiler from extension with the same name
+ proffn = _loadprofiler(self._ui, profiler)
+ if proffn is None:
+ self._ui.warn(_("unrecognized profiler '%s' - ignored\n")
+ % profiler)
+ profiler = 'stat'
+
+ self._output = self._ui.config('profiling', 'output')
- try:
- if profiler == 'ls':
- proffn = lsprofile
- elif profiler == 'flame':
- proffn = flameprofile
- else:
- proffn = statprofile
+ try:
+ if self._output == 'blackbox':
+ self._fp = util.stringio()
+ elif self._output:
+ path = self._ui.expandpath(self._output)
+ self._fp = open(path, 'wb')
+ else:
+ self._fpdoclose = False
+ self._fp = self._ui.ferr
- with proffn(ui, fp):
- yield
+ if proffn is not None:
+ pass
+ elif profiler == 'ls':
+ proffn = lsprofile
+ elif profiler == 'flame':
+ proffn = flameprofile
+ else:
+ proffn = statprofile
- finally:
- if output:
- if output == 'blackbox':
- val = 'Profile:\n%s' % fp.getvalue()
+ self._profiler = proffn(self._ui, self._fp)
+ self._profiler.__enter__()
+ except: # re-raises
+ self._closefp()
+ raise
+
+ def __exit__(self, exception_type, exception_value, traceback):
+ propagate = None
+ if self._profiler is not None:
+ propagate = self._profiler.__exit__(exception_type, exception_value,
+ traceback)
+ if self._output == 'blackbox':
+ val = 'Profile:\n%s' % self._fp.getvalue()
# ui.log treats the input as a format string,
# so we need to escape any % signs.
val = val.replace('%', '%%')
- ui.log('profile', val)
- fp.close()
-
-@contextlib.contextmanager
-def maybeprofile(ui):
- """Profile if enabled, else do nothing.
-
- This context manager can be used to optionally profile if profiling
- is enabled. Otherwise, it does nothing.
+ self._ui.log('profile', val)
+ self._closefp()
+ return propagate
- The purpose of this context manager is to make calling code simpler:
- just use a single code path for calling into code you may want to profile
- and this function determines whether to start profiling.
- """
- if ui.configbool('profiling', 'enabled'):
- with profile(ui):
- yield
- else:
- yield
+ def _closefp(self):
+ if self._fpdoclose and self._fp is not None:
+ self._fp.close()
--- a/mercurial/pure/bdiff.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/pure/bdiff.py Tue Jun 20 16:33:46 2017 -0400
@@ -11,10 +11,6 @@
import re
import struct
-from . import policy
-policynocffi = policy.policynocffi
-modulepolicy = policy.policy
-
def splitnewlines(text):
'''like str.splitlines, but only split on newlines.'''
lines = [l + '\n' for l in text.split('\n')]
@@ -93,70 +89,3 @@
text = re.sub('[ \t\r]+', ' ', text)
text = text.replace(' \n', '\n')
return text
-
-if modulepolicy not in policynocffi:
- try:
- from _bdiff_cffi import ffi, lib
- except ImportError:
- if modulepolicy == 'cffi': # strict cffi import
- raise
- else:
- def blocks(sa, sb):
- a = ffi.new("struct bdiff_line**")
- b = ffi.new("struct bdiff_line**")
- ac = ffi.new("char[]", str(sa))
- bc = ffi.new("char[]", str(sb))
- l = ffi.new("struct bdiff_hunk*")
- try:
- an = lib.bdiff_splitlines(ac, len(sa), a)
- bn = lib.bdiff_splitlines(bc, len(sb), b)
- if not a[0] or not b[0]:
- raise MemoryError
- count = lib.bdiff_diff(a[0], an, b[0], bn, l)
- if count < 0:
- raise MemoryError
- rl = [None] * count
- h = l.next
- i = 0
- while h:
- rl[i] = (h.a1, h.a2, h.b1, h.b2)
- h = h.next
- i += 1
- finally:
- lib.free(a[0])
- lib.free(b[0])
- lib.bdiff_freehunks(l.next)
- return rl
-
- def bdiff(sa, sb):
- a = ffi.new("struct bdiff_line**")
- b = ffi.new("struct bdiff_line**")
- ac = ffi.new("char[]", str(sa))
- bc = ffi.new("char[]", str(sb))
- l = ffi.new("struct bdiff_hunk*")
- try:
- an = lib.bdiff_splitlines(ac, len(sa), a)
- bn = lib.bdiff_splitlines(bc, len(sb), b)
- if not a[0] or not b[0]:
- raise MemoryError
- count = lib.bdiff_diff(a[0], an, b[0], bn, l)
- if count < 0:
- raise MemoryError
- rl = []
- h = l.next
- la = lb = 0
- while h:
- if h.a1 != la or h.b1 != lb:
- lgt = (b[0] + h.b1).l - (b[0] + lb).l
- rl.append(struct.pack(">lll", (a[0] + la).l - a[0].l,
- (a[0] + h.a1).l - a[0].l, lgt))
- rl.append(str(ffi.buffer((b[0] + lb).l, lgt)))
- la = h.a2
- lb = h.b2
- h = h.next
-
- finally:
- lib.free(a[0])
- lib.free(b[0])
- lib.bdiff_freehunks(l.next)
- return "".join(rl)
--- a/mercurial/pure/mpatch.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/pure/mpatch.py Tue Jun 20 16:33:46 2017 -0400
@@ -9,10 +9,8 @@
import struct
-from . import policy, pycompat
+from .. import pycompat
stringio = pycompat.stringio
-modulepolicy = policy.policy
-policynocffi = policy.policynocffi
class mpatchError(Exception):
"""error raised when a delta cannot be decoded
@@ -127,44 +125,3 @@
outlen += orig - last
return outlen
-
-if modulepolicy not in policynocffi:
- try:
- from _mpatch_cffi import ffi, lib
- except ImportError:
- if modulepolicy == 'cffi': # strict cffi import
- raise
- else:
- @ffi.def_extern()
- def cffi_get_next_item(arg, pos):
- all, bins = ffi.from_handle(arg)
- container = ffi.new("struct mpatch_flist*[1]")
- to_pass = ffi.new("char[]", str(bins[pos]))
- all.append(to_pass)
- r = lib.mpatch_decode(to_pass, len(to_pass) - 1, container)
- if r < 0:
- return ffi.NULL
- return container[0]
-
- def patches(text, bins):
- lgt = len(bins)
- all = []
- if not lgt:
- return text
- arg = (all, bins)
- patch = lib.mpatch_fold(ffi.new_handle(arg),
- lib.cffi_get_next_item, 0, lgt)
- if not patch:
- raise mpatchError("cannot decode chunk")
- outlen = lib.mpatch_calcsize(len(text), patch)
- if outlen < 0:
- lib.mpatch_lfree(patch)
- raise mpatchError("inconsistency detected")
- buf = ffi.new("char[]", outlen)
- if lib.mpatch_apply(buf, text, len(text), patch) < 0:
- lib.mpatch_lfree(patch)
- raise mpatchError("error applying patches")
- res = ffi.buffer(buf, outlen)[:]
- lib.mpatch_lfree(patch)
- return res
-
--- a/mercurial/pure/osutil.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/pure/osutil.py Tue Jun 20 16:33:46 2017 -0400
@@ -13,14 +13,10 @@
import socket
import stat as statmod
-from . import (
- policy,
+from .. import (
pycompat,
)
-modulepolicy = policy.policy
-policynocffi = policy.policynocffi
-
def _mode_to_kind(mode):
if statmod.S_ISREG(mode):
return statmod.S_IFREG
@@ -38,7 +34,7 @@
return statmod.S_IFSOCK
return mode
-def listdirpure(path, stat=False, skip=None):
+def listdir(path, stat=False, skip=None):
'''listdir(path, stat=False) -> list_of_tuples
Return a sorted list containing information about the entries
@@ -68,96 +64,6 @@
result.append((fn, _mode_to_kind(st.st_mode)))
return result
-ffi = None
-if modulepolicy not in policynocffi and pycompat.sysplatform == 'darwin':
- try:
- from _osutil_cffi import ffi, lib
- except ImportError:
- if modulepolicy == 'cffi': # strict cffi import
- raise
-
-if pycompat.sysplatform == 'darwin' and ffi is not None:
- listdir_batch_size = 4096
- # tweakable number, only affects performance, which chunks
- # of bytes do we get back from getattrlistbulk
-
- attrkinds = [None] * 20 # we need the max no for enum VXXX, 20 is plenty
-
- attrkinds[lib.VREG] = statmod.S_IFREG
- attrkinds[lib.VDIR] = statmod.S_IFDIR
- attrkinds[lib.VLNK] = statmod.S_IFLNK
- attrkinds[lib.VBLK] = statmod.S_IFBLK
- attrkinds[lib.VCHR] = statmod.S_IFCHR
- attrkinds[lib.VFIFO] = statmod.S_IFIFO
- attrkinds[lib.VSOCK] = statmod.S_IFSOCK
-
- class stat_res(object):
- def __init__(self, st_mode, st_mtime, st_size):
- self.st_mode = st_mode
- self.st_mtime = st_mtime
- self.st_size = st_size
-
- tv_sec_ofs = ffi.offsetof("struct timespec", "tv_sec")
- buf = ffi.new("char[]", listdir_batch_size)
-
- def listdirinternal(dfd, req, stat, skip):
- ret = []
- while True:
- r = lib.getattrlistbulk(dfd, req, buf, listdir_batch_size, 0)
- if r == 0:
- break
- if r == -1:
- raise OSError(ffi.errno, os.strerror(ffi.errno))
- cur = ffi.cast("val_attrs_t*", buf)
- for i in range(r):
- lgt = cur.length
- assert lgt == ffi.cast('uint32_t*', cur)[0]
- ofs = cur.name_info.attr_dataoffset
- str_lgt = cur.name_info.attr_length
- base_ofs = ffi.offsetof('val_attrs_t', 'name_info')
- name = str(ffi.buffer(ffi.cast("char*", cur) + base_ofs + ofs,
- str_lgt - 1))
- tp = attrkinds[cur.obj_type]
- if name == "." or name == "..":
- continue
- if skip == name and tp == statmod.S_ISDIR:
- return []
- if stat:
- mtime = cur.mtime.tv_sec
- mode = (cur.accessmask & ~lib.S_IFMT)| tp
- ret.append((name, tp, stat_res(st_mode=mode, st_mtime=mtime,
- st_size=cur.datalength)))
- else:
- ret.append((name, tp))
- cur = ffi.cast("val_attrs_t*", int(ffi.cast("intptr_t", cur))
- + lgt)
- return ret
-
- def listdir(path, stat=False, skip=None):
- req = ffi.new("struct attrlist*")
- req.bitmapcount = lib.ATTR_BIT_MAP_COUNT
- req.commonattr = (lib.ATTR_CMN_RETURNED_ATTRS |
- lib.ATTR_CMN_NAME |
- lib.ATTR_CMN_OBJTYPE |
- lib.ATTR_CMN_ACCESSMASK |
- lib.ATTR_CMN_MODTIME)
- req.fileattr = lib.ATTR_FILE_DATALENGTH
- dfd = lib.open(path, lib.O_RDONLY, 0)
- if dfd == -1:
- raise OSError(ffi.errno, os.strerror(ffi.errno))
-
- try:
- ret = listdirinternal(dfd, req, stat, skip)
- finally:
- try:
- lib.close(dfd)
- except BaseException:
- pass # we ignore all the errors from closing, not
- # much we can do about that
- return ret
-else:
- listdir = listdirpure
-
if pycompat.osname != 'nt':
posixfile = open
--- a/mercurial/pure/parsers.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/pure/parsers.py Tue Jun 20 16:33:46 2017 -0400
@@ -10,8 +10,8 @@
import struct
import zlib
-from .node import nullid
-from . import pycompat
+from ..node import nullid
+from .. import pycompat
stringio = pycompat.stringio
--- a/mercurial/pvec.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/pvec.py Tue Jun 20 16:33:46 2017 -0400
@@ -52,7 +52,6 @@
from .node import nullrev
from . import (
- base85,
util,
)
@@ -166,13 +165,13 @@
else:
pvc[n] = _mergevec(pvc[p1], pvc[p2], node)
bs = _join(*pvc[ctx.rev()])
- return pvec(base85.b85encode(bs))
+ return pvec(util.b85encode(bs))
class pvec(object):
def __init__(self, hashorctx):
if isinstance(hashorctx, str):
self._bs = hashorctx
- self._depth, self._vec = _split(base85.b85decode(hashorctx))
+ self._depth, self._vec = _split(util.b85decode(hashorctx))
else:
self._vec = ctxpvec(hashorctx)
--- a/mercurial/pycompat.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/pycompat.py Tue Jun 20 16:33:46 2017 -0400
@@ -32,6 +32,9 @@
import socketserver
import xmlrpc.client as xmlrpclib
+empty = _queue.Empty
+queue = _queue.Queue
+
def identity(a):
return a
@@ -43,7 +46,6 @@
fsencode = os.fsencode
fsdecode = os.fsdecode
- # A bytes version of os.name.
oslinesep = os.linesep.encode('ascii')
osname = os.name.encode('ascii')
ospathsep = os.pathsep.encode('ascii')
@@ -87,6 +89,14 @@
>>> s = bytestr(b'foo')
>>> assert s is bytestr(s)
+ __bytes__() should be called if provided:
+
+ >>> class bytesable(object):
+ ... def __bytes__(self):
+ ... return b'bytes'
+ >>> bytestr(bytesable())
+ b'bytes'
+
There's no implicit conversion from non-ascii str as its encoding is
unknown:
@@ -127,7 +137,8 @@
def __new__(cls, s=b''):
if isinstance(s, bytestr):
return s
- if not isinstance(s, (bytes, bytearray)):
+ if (not isinstance(s, (bytes, bytearray))
+ and not hasattr(s, u'__bytes__')): # hasattr-py3-only
s = str(s).encode(u'ascii')
return bytes.__new__(cls, s)
@@ -164,6 +175,26 @@
return s
return s.decode(u'latin-1')
+ def strurl(url):
+ """Converts a bytes url back to str"""
+ return url.decode(u'ascii')
+
+ def bytesurl(url):
+ """Converts a str url to bytes by encoding in ascii"""
+ return url.encode(u'ascii')
+
+ def raisewithtb(exc, tb):
+ """Raise exception with the given traceback"""
+ raise exc.with_traceback(tb)
+
+ def getdoc(obj):
+ """Get docstring as bytes; may be None so gettext() won't confuse it
+ with _('')"""
+ doc = getattr(obj, u'__doc__', None)
+ if doc is None:
+ return doc
+ return sysbytes(doc)
+
def _wrapattrfunc(f):
@functools.wraps(f)
def w(object, name, *args):
@@ -181,10 +212,13 @@
def open(name, mode='r', buffering=-1):
return builtins.open(name, sysstr(mode), buffering)
- # getopt.getopt() on Python 3 deals with unicodes internally so we cannot
- # pass bytes there. Passing unicodes will result in unicodes as return
- # values which we need to convert again to bytes.
def getoptb(args, shortlist, namelist):
+ """
+ Takes bytes arguments, converts them to unicode, pass them to
+ getopt.getopt(), convert the returned values back to bytes and then
+ return them for Python 3 compatibility as getopt.getopt() doesn't accept
+ bytes on Python 3.
+ """
args = [a.decode('latin-1') for a in args]
shortlist = shortlist.decode('latin-1')
namelist = [a.decode('latin-1') for a in namelist]
@@ -194,24 +228,30 @@
args = [a.encode('latin-1') for a in args]
return opts, args
- # keys of keyword arguments in Python need to be strings which are unicodes
- # Python 3. This function takes keyword arguments, convert the keys to str.
def strkwargs(dic):
+ """
+ Converts the keys of a Python dictionary to str i.e. unicodes so that
+ they can be passed as keyword arguments as dictionaries with bytes keys
+ can't be passed as keyword arguments to functions on Python 3.
+ """
dic = dict((k.decode('latin-1'), v) for k, v in dic.iteritems())
return dic
- # keys of keyword arguments need to be unicode while passing into
- # a function. This function helps us to convert those keys back to bytes
- # again as we need to deal with bytes.
def byteskwargs(dic):
+ """
+ Converts keys of Python dictionaries to bytes as they were converted to
+ str to pass that dictionary as a keyword argument on Python 3.
+ """
dic = dict((k.encode('latin-1'), v) for k, v in dic.iteritems())
return dic
- # shlex.split() accepts unicodes on Python 3. This function takes bytes
- # argument, convert it into unicodes, pass into shlex.split(), convert the
- # returned value to bytes and return that.
# TODO: handle shlex.shlex().
def shlexsplit(s):
+ """
+ Takes bytes argument, convert it to str i.e. unicodes, pass that into
+ shlex.split(), convert the returned value to bytes and return that for
+ Python 3 compatibility as shlex.split() doesn't accept bytes on Python 3.
+ """
ret = shlex.split(s.decode('latin-1'))
return [a.encode('latin-1') for a in ret]
@@ -223,11 +263,19 @@
iterbytestr = iter
sysbytes = identity
sysstr = identity
+ strurl = identity
+ bytesurl = identity
- # Partial backport from os.py in Python 3, which only accepts bytes.
- # In Python 2, our paths should only ever be bytes, a unicode path
- # indicates a bug.
+ # this can't be parsed on Python 3
+ exec('def raisewithtb(exc, tb):\n'
+ ' raise exc, None, tb\n')
+
def fsencode(filename):
+ """
+ Partial backport from os.py in Python 3, which only accepts bytes.
+ In Python 2, our paths should only ever be bytes, a unicode path
+ indicates a bug.
+ """
if isinstance(filename, str):
return filename
else:
@@ -238,6 +286,9 @@
# better not to touch Python 2 part as it's already working fine.
fsdecode = identity
+ def getdoc(obj):
+ return getattr(obj, '__doc__', None)
+
def getoptb(args, shortlist, namelist):
return getopt.getopt(args, shortlist, namelist)
@@ -261,9 +312,6 @@
stringio = cStringIO.StringIO
maplist = map
-empty = _queue.Empty
-queue = _queue.Queue
-
class _pycompatstub(object):
def __init__(self):
self._aliases = {}
--- a/mercurial/rcutil.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/rcutil.py Tue Jun 20 16:33:46 2017 -0400
@@ -11,7 +11,6 @@
from . import (
encoding,
- osutil,
pycompat,
util,
)
@@ -30,7 +29,7 @@
p = util.expandpath(path)
if os.path.isdir(p):
join = os.path.join
- return [join(p, f) for f, k in osutil.listdir(p) if f.endswith('.rc')]
+ return [join(p, f) for f, k in util.listdir(p) if f.endswith('.rc')]
return [p]
def envrcitems(env=None):
--- a/mercurial/registrar.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/registrar.py Tue Jun 20 16:33:46 2017 -0400
@@ -96,6 +96,47 @@
"""
pass
+class command(_funcregistrarbase):
+ """Decorator to register a command function to table
+
+ This class receives a command table as its argument. The table should
+ be a dict.
+
+ The created object can be used as a decorator for adding commands to
+ that command table. This accepts multiple arguments to define a command.
+
+ The first argument is the command name.
+
+ The options argument is an iterable of tuples defining command arguments.
+ See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
+
+ The synopsis argument defines a short, one line summary of how to use the
+ command. This shows up in the help output.
+
+ The norepo argument defines whether the command does not require a
+ local repository. Most commands operate against a repository, thus the
+ default is False.
+
+ The optionalrepo argument defines whether the command optionally requires
+ a local repository.
+
+ The inferrepo argument defines whether to try to find a repository from the
+ command line arguments. If True, arguments will be examined for potential
+ repository locations. See ``findrepo()``. If a repository is found, it
+ will be used.
+ """
+
+ def _doregister(self, func, name, options=(), synopsis=None,
+ norepo=False, optionalrepo=False, inferrepo=False):
+ func.norepo = norepo
+ func.optionalrepo = optionalrepo
+ func.inferrepo = inferrepo
+ if synopsis:
+ self._table[name] = func, list(options), synopsis
+ else:
+ self._table[name] = func, list(options)
+ return func
+
class revsetpredicate(_funcregistrarbase):
"""Decorator to register revset predicate
--- a/mercurial/repair.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/repair.py Tue Jun 20 16:33:46 2017 -0400
@@ -16,18 +16,16 @@
from . import (
bundle2,
changegroup,
+ discovery,
error,
exchange,
obsolete,
util,
)
-def _bundle(repo, bases, heads, node, suffix, compress=True):
+def _bundle(repo, bases, heads, node, suffix, compress=True, obsolescence=True):
"""create a bundle with the specified revisions as a backup"""
- cgversion = changegroup.safeversion(repo)
- cg = changegroup.changegroupsubset(repo, bases, heads, 'strip',
- version=cgversion)
backupdir = "strip-backup"
vfs = repo.vfs
if not vfs.isdir(backupdir):
@@ -39,6 +37,7 @@
totalhash = hashlib.sha1(''.join(allhashes)).hexdigest()
name = "%s/%s-%s-%s.hg" % (backupdir, short(node), totalhash[:8], suffix)
+ cgversion = changegroup.safeversion(repo)
comp = None
if cgversion != '01':
bundletype = "HG20"
@@ -48,8 +47,11 @@
bundletype = "HG10BZ"
else:
bundletype = "HG10UN"
- return bundle2.writebundle(repo.ui, cg, name, bundletype, vfs,
- compression=comp)
+
+ outgoing = discovery.outgoing(repo, missingroots=bases, missingheads=heads)
+ contentopts = {'cg.version': cgversion, 'obsolescence': obsolescence}
+ return bundle2.writenewbundle(repo.ui, repo, 'strip', name, bundletype,
+ outgoing, contentopts, vfs, compression=comp)
def _collectfiles(repo, striprev):
"""find out the filelogs affected by the strip"""
@@ -74,8 +76,12 @@
return s
def strip(ui, repo, nodelist, backup=True, topic='backup'):
- # This function operates within a transaction of its own, but does
- # not take any lock on the repo.
+ # This function requires the caller to lock the repo, but it operates
+ # within a transaction of its own, and thus requires there to be no current
+ # transaction when it is called.
+ if repo.currenttransaction() is not None:
+ raise error.ProgrammingError('cannot strip from inside a transaction')
+
# Simple way to maintain backwards compatibility for this
# argument.
if backup in ['none', 'strip']:
@@ -120,6 +126,13 @@
savebases = [cl.node(r) for r in saverevs]
stripbases = [cl.node(r) for r in tostrip]
+ stripobsidx = obsmarkers = ()
+ if repo.ui.configbool('devel', 'strip-obsmarkers', True):
+ obsmarkers = obsolete.exclusivemarkers(repo, stripbases)
+ if obsmarkers:
+ stripobsidx = [i for i, m in enumerate(repo.obsstore)
+ if m in obsmarkers]
+
# For a set s, max(parents(s) - s) is the same as max(heads(::s - s)), but
# is much faster
newbmtarget = repo.revs('max(parents(%ld) - (%ld))', tostrip, tostrip)
@@ -148,16 +161,16 @@
tmpbundlefile = None
if saveheads:
# do not compress temporary bundle if we remove it from disk later
+ #
+ # We do not include obsolescence, it might re-introduce prune markers
+ # we are trying to strip. This is harmless since the stripped markers
+        # are already backed up and we did not touch the markers for the
+ # saved changesets.
tmpbundlefile = _bundle(repo, savebases, saveheads, node, 'temp',
- compress=False)
+ compress=False, obsolescence=False)
mfst = repo.manifestlog._revlog
- curtr = repo.currenttransaction()
- if curtr is not None:
- del curtr # avoid carrying reference to transaction for nothing
- raise error.ProgrammingError('cannot strip from inside a transaction')
-
try:
with repo.transaction("strip") as tr:
offset = len(tr.entries)
@@ -165,13 +178,8 @@
tr.startgroup()
cl.strip(striprev, tr)
mfst.strip(striprev, tr)
- if 'treemanifest' in repo.requirements: # safe but unnecessary
- # otherwise
- for unencoded, encoded, size in repo.store.datafiles():
- if (unencoded.startswith('meta/') and
- unencoded.endswith('00manifest.i')):
- dir = unencoded[5:-12]
- repo.manifestlog._revlog.dirlog(dir).strip(striprev, tr)
+ striptrees(repo, tr, striprev, files)
+
for fn in files:
repo.file(fn).strip(striprev, tr)
tr.endgroup()
@@ -183,6 +191,9 @@
if troffset == 0:
repo.store.markremoved(file)
+ deleteobsmarkers(repo.obsstore, stripobsidx)
+ del repo.obsstore
+
if tmpbundlefile:
ui.note(_("adding branch\n"))
f = vfs.open(tmpbundlefile, "rb")
@@ -190,15 +201,15 @@
if not repo.ui.verbose:
# silence internal shuffling chatter
repo.ui.pushbuffer()
+ tmpbundleurl = 'bundle:' + vfs.join(tmpbundlefile)
if isinstance(gen, bundle2.unbundle20):
with repo.transaction('strip') as tr:
- tr.hookargs = {'source': 'strip',
- 'url': 'bundle:' + vfs.join(tmpbundlefile)}
bundle2.applybundle(repo, gen, tr, source='strip',
- url='bundle:' + vfs.join(tmpbundlefile))
+ url=tmpbundleurl)
else:
- gen.apply(repo, 'strip', 'bundle:' + vfs.join(tmpbundlefile),
- True)
+ txnname = "strip\n%s" % util.hidepassword(tmpbundleurl)
+ with repo.transaction(txnname) as tr:
+ gen.apply(repo, tr, 'strip', tmpbundleurl, True)
if not repo.ui.verbose:
repo.ui.popbuffer()
f.close()
@@ -207,9 +218,8 @@
for m in updatebm:
bm[m] = repo[newbmtarget].node()
- with repo.lock():
- with repo.transaction('repair') as tr:
- bm.recordchange(tr)
+ with repo.transaction('repair') as tr:
+ bm.recordchange(tr)
# remove undo files
for undovfs, undofile in repo.undofiles():
@@ -240,6 +250,15 @@
# extensions can use it
return backupfile
+def striptrees(repo, tr, striprev, files):
+ if 'treemanifest' in repo.requirements: # safe but unnecessary
+ # otherwise
+ for unencoded, encoded, size in repo.store.datafiles():
+ if (unencoded.startswith('meta/') and
+ unencoded.endswith('00manifest.i')):
+ dir = unencoded[5:-12]
+ repo.manifestlog._revlog.dirlog(dir).strip(striprev, tr)
+
def rebuildfncache(ui, repo):
"""Rebuilds the fncache file from repo history.
--- a/mercurial/repoview.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/repoview.py Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,9 @@
from __future__ import absolute_import
import copy
-import hashlib
-import heapq
-import struct
from .node import nullrev
from . import (
- error,
obsolete,
phases,
tags as tagsmod,
@@ -32,141 +28,40 @@
lead to crashes."""
return obsolete.getrevs(repo, 'obsolete')
-def _getstatichidden(repo):
- """Revision to be hidden (disregarding dynamic blocker)
-
- To keep a consistent graph, we cannot hide any revisions with
- non-hidden descendants. This function computes the set of
- revisions that could be hidden while keeping the graph consistent.
-
- A second pass will be done to apply "dynamic blocker" like bookmarks or
- working directory parents.
-
+def pinnedrevs(repo):
+ """revisions blocking hidden changesets from being filtered
"""
- assert not repo.changelog.filteredrevs
- hidden = set(hideablerevs(repo))
- if hidden:
- getphase = repo._phasecache.phase
- getparentrevs = repo.changelog.parentrevs
- # Skip heads which are public (guaranteed to not be hidden)
- heap = [-r for r in repo.changelog.headrevs() if getphase(repo, r)]
- heapq.heapify(heap)
- heappop = heapq.heappop
- heappush = heapq.heappush
- seen = set() # no need to init it with heads, they have no children
- while heap:
- rev = -heappop(heap)
- # All children have been processed so at that point, if no children
- # removed 'rev' from the 'hidden' set, 'rev' is going to be hidden.
- blocker = rev not in hidden
- for parent in getparentrevs(rev):
- if parent == nullrev:
- continue
- if blocker:
- # If visible, ensure parent will be visible too
- hidden.discard(parent)
- # - Avoid adding the same revision twice
- # - Skip nodes which are public (guaranteed to not be hidden)
- pre = len(seen)
- seen.add(parent)
- if pre < len(seen) and getphase(repo, rev):
- heappush(heap, -parent)
- return hidden
-
-def _getdynamicblockers(repo):
- """Non-cacheable revisions blocking hidden changesets from being filtered.
-
- Get revisions that will block hidden changesets and are likely to change,
- but unlikely to create hidden blockers. They won't be cached, so be careful
- with adding additional computation."""
cl = repo.changelog
- blockers = set()
- blockers.update([par.rev() for par in repo[None].parents()])
- blockers.update([cl.rev(bm) for bm in repo._bookmarks.values()])
+ pinned = set()
+ pinned.update([par.rev() for par in repo[None].parents()])
+ pinned.update([cl.rev(bm) for bm in repo._bookmarks.values()])
tags = {}
tagsmod.readlocaltags(repo.ui, repo, tags, {})
if tags:
rev, nodemap = cl.rev, cl.nodemap
- blockers.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
- return blockers
+ pinned.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
+ return pinned
-cacheversion = 1
-cachefile = 'cache/hidden'
-
-def cachehash(repo, hideable):
- """return sha1 hash of repository data to identify a valid cache.
- We calculate a sha1 of repo heads and the content of the obsstore and write
- it to the cache. Upon reading we can easily validate by checking the hash
- against the stored one and discard the cache in case the hashes don't match.
- """
- h = hashlib.sha1()
- h.update(''.join(repo.heads()))
- h.update('%d' % hash(frozenset(hideable)))
- return h.digest()
+def _revealancestors(pfunc, hidden, revs):
+ """reveals contiguous chains of hidden ancestors of 'revs' by removing them
+ from 'hidden'
-def _writehiddencache(cachefile, cachehash, hidden):
- """write hidden data to a cache file"""
- data = struct.pack('>%ii' % len(hidden), *sorted(hidden))
- cachefile.write(struct.pack(">H", cacheversion))
- cachefile.write(cachehash)
- cachefile.write(data)
+    - pfunc(r): a function returning parents of 'r',
+ - hidden: the (preliminary) hidden revisions, to be updated
+ - revs: iterable of revnum,
-def trywritehiddencache(repo, hideable, hidden):
- """write cache of hidden changesets to disk
-
- Will not write the cache if a wlock cannot be obtained lazily.
- The cache consists of a head of 22byte:
- 2 byte version number of the cache
- 20 byte sha1 to validate the cache
- n*4 byte hidden revs
+ (Ancestors are revealed exclusively, i.e. the elements in 'revs' are
+ *not* revealed)
"""
- wlock = fh = None
- try:
- wlock = repo.wlock(wait=False)
- # write cache to file
- newhash = cachehash(repo, hideable)
- fh = repo.vfs.open(cachefile, 'w+b', atomictemp=True)
- _writehiddencache(fh, newhash, hidden)
- fh.close()
- except (IOError, OSError):
- repo.ui.debug('error writing hidden changesets cache\n')
- except error.LockHeld:
- repo.ui.debug('cannot obtain lock to write hidden changesets cache\n')
- finally:
- if wlock:
- wlock.release()
-
-def _readhiddencache(repo, cachefilename, newhash):
- hidden = fh = None
- try:
- if repo.vfs.exists(cachefile):
- fh = repo.vfs.open(cachefile, 'rb')
- version, = struct.unpack(">H", fh.read(2))
- oldhash = fh.read(20)
- if (cacheversion, oldhash) == (version, newhash):
- # cache is valid, so we can start reading the hidden revs
- data = fh.read()
- count = len(data) / 4
- hidden = frozenset(struct.unpack('>%ii' % count, data))
- return hidden
- except struct.error:
- repo.ui.debug('corrupted hidden cache\n')
- # No need to fix the content as it will get rewritten
- return None
- except (IOError, OSError):
- repo.ui.debug('cannot read hidden cache\n')
- return None
- finally:
- if fh:
- fh.close()
-
-def tryreadcache(repo, hideable):
- """read a cache if the cache exists and is valid, otherwise returns None."""
- newhash = cachehash(repo, hideable)
- return _readhiddencache(repo, cachefile, newhash)
+ stack = list(revs)
+ while stack:
+ for p in pfunc(stack.pop()):
+ if p != nullrev and p in hidden:
+ hidden.remove(p)
+ stack.append(p)
def computehidden(repo):
"""compute the set of hidden revision to filter
@@ -174,22 +69,16 @@
During most operation hidden should be filtered."""
assert not repo.changelog.filteredrevs
- hidden = frozenset()
- hideable = hideablerevs(repo)
- if hideable:
- cl = repo.changelog
- hidden = tryreadcache(repo, hideable)
- if hidden is None:
- hidden = frozenset(_getstatichidden(repo))
- trywritehiddencache(repo, hideable, hidden)
+ hidden = hideablerevs(repo)
+ if hidden:
+ hidden = set(hidden - pinnedrevs(repo))
+ pfunc = repo.changelog.parentrevs
+ mutablephases = (phases.draft, phases.secret)
+ mutable = repo._phasecache.getrevset(repo, mutablephases)
- # check if we have wd parents, bookmarks or tags pointing to hidden
- # changesets and remove those.
- dynamic = hidden & _getdynamicblockers(repo)
- if dynamic:
- blocked = cl.ancestors(dynamic, inclusive=True)
- hidden = frozenset(r for r in hidden if r not in blocked)
- return hidden
+ visible = mutable - hidden
+ _revealancestors(pfunc, hidden, visible)
+ return frozenset(hidden)
def computeunserved(repo):
"""compute the set of revision that should be filtered when used a server
@@ -354,10 +243,3 @@
def __delattr__(self, attr):
return delattr(self._unfilteredrepo, attr)
-
- # The `requirements` attribute is initialized during __init__. But
- # __getattr__ won't be called as it also exists on the class. We need
- # explicit forwarding to main repo here
- @property
- def requirements(self):
- return self._unfilteredrepo.requirements
--- a/mercurial/revlog.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/revlog.py Tue Jun 20 16:33:46 2017 -0400
@@ -26,18 +26,23 @@
hex,
nullid,
nullrev,
+ wdirhex,
+ wdirid,
+ wdirrev,
)
from .i18n import _
from . import (
ancestor,
error,
mdiff,
- parsers,
+ policy,
pycompat,
templatefilters,
util,
)
+parsers = policy.importmod(r'parsers')
+
_pack = struct.pack
_unpack = struct.unpack
# Aliased for performance.
@@ -45,13 +50,17 @@
# revlog header flags
REVLOGV0 = 0
-REVLOGNG = 1
-REVLOGNGINLINEDATA = (1 << 16)
-REVLOGGENERALDELTA = (1 << 17)
-REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
-REVLOG_DEFAULT_FORMAT = REVLOGNG
+REVLOGV1 = 1
+# Dummy value until file format is finalized.
+# Reminder: change the bounds check in revlog.__init__ when this is changed.
+REVLOGV2 = 0xDEAD
+FLAG_INLINE_DATA = (1 << 16)
+FLAG_GENERALDELTA = (1 << 17)
+REVLOG_DEFAULT_FLAGS = FLAG_INLINE_DATA
+REVLOG_DEFAULT_FORMAT = REVLOGV1
REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
-REVLOGNG_FLAGS = REVLOGNGINLINEDATA | REVLOGGENERALDELTA
+REVLOGV1_FLAGS = FLAG_INLINE_DATA | FLAG_GENERALDELTA
+REVLOGV2_FLAGS = REVLOGV1_FLAGS
# revlog index flags
REVIDX_ISCENSORED = (1 << 15) # revision has censor metadata, must be verified
@@ -187,7 +196,7 @@
def packentry(self, entry, node, version, rev):
if gettype(entry[0]):
- raise RevlogError(_("index entry flags need RevlogNG"))
+ raise RevlogError(_('index entry flags need revlog version 1'))
e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
node(entry[5]), node(entry[6]), entry[7])
return _pack(indexformatv0, *e2)
@@ -252,7 +261,7 @@
If checkambig, indexfile is opened with checkambig=True at
writing, to avoid file stat ambiguity.
"""
- def __init__(self, opener, indexfile, checkambig=False):
+ def __init__(self, opener, indexfile, datafile=None, checkambig=False):
"""
create a revlog object
@@ -260,7 +269,7 @@
and can be used to implement COW semantics or the like.
"""
self.indexfile = indexfile
- self.datafile = indexfile[:-2] + ".d"
+ self.datafile = datafile or (indexfile[:-2] + ".d")
self.opener = opener
# When True, indexfile is opened with checkambig=True at writing, to
# avoid file stat ambiguity.
@@ -286,9 +295,12 @@
v = REVLOG_DEFAULT_VERSION
opts = getattr(opener, 'options', None)
if opts is not None:
- if 'revlogv1' in opts:
+ if 'revlogv2' in opts:
+ # version 2 revlogs always use generaldelta.
+ v = REVLOGV2 | FLAG_GENERALDELTA | FLAG_INLINE_DATA
+ elif 'revlogv1' in opts:
if 'generaldelta' in opts:
- v |= REVLOGGENERALDELTA
+ v |= FLAG_GENERALDELTA
else:
v = 0
if 'chunkcachesize' in opts:
@@ -322,19 +334,28 @@
raise
self.version = v
- self._inline = v & REVLOGNGINLINEDATA
- self._generaldelta = v & REVLOGGENERALDELTA
+ self._inline = v & FLAG_INLINE_DATA
+ self._generaldelta = v & FLAG_GENERALDELTA
flags = v & ~0xFFFF
fmt = v & 0xFFFF
- if fmt == REVLOGV0 and flags:
- raise RevlogError(_("index %s unknown flags %#04x for format v0")
- % (self.indexfile, flags >> 16))
- elif fmt == REVLOGNG and flags & ~REVLOGNG_FLAGS:
- raise RevlogError(_("index %s unknown flags %#04x for revlogng")
- % (self.indexfile, flags >> 16))
- elif fmt > REVLOGNG:
- raise RevlogError(_("index %s unknown format %d")
- % (self.indexfile, fmt))
+ if fmt == REVLOGV0:
+ if flags:
+ raise RevlogError(_('unknown flags (%#04x) in version %d '
+ 'revlog %s') %
+ (flags >> 16, fmt, self.indexfile))
+ elif fmt == REVLOGV1:
+ if flags & ~REVLOGV1_FLAGS:
+ raise RevlogError(_('unknown flags (%#04x) in version %d '
+ 'revlog %s') %
+ (flags >> 16, fmt, self.indexfile))
+ elif fmt == REVLOGV2:
+ if flags & ~REVLOGV2_FLAGS:
+ raise RevlogError(_('unknown flags (%#04x) in version %d '
+ 'revlog %s') %
+ (flags >> 16, fmt, self.indexfile))
+ else:
+ raise RevlogError(_('unknown version (%d) in revlog %s') %
+ (fmt, self.indexfile))
self.storedeltachains = True
@@ -409,6 +430,8 @@
raise
except RevlogError:
# parsers.c radix tree lookup failed
+ if node == wdirid:
+ raise error.WdirUnsupported
raise LookupError(node, self.indexfile, _('no node'))
except KeyError:
# pure python cache lookup failed
@@ -423,6 +446,8 @@
if v == node:
self._nodepos = r - 1
return r
+ if node == wdirid:
+ raise error.WdirUnsupported
raise LookupError(node, self.indexfile, _('no node'))
# Accessors for index entries.
@@ -475,10 +500,20 @@
return self.index[rev][4]
def parentrevs(self, rev):
- return self.index[rev][5:7]
+ try:
+ return self.index[rev][5:7]
+ except IndexError:
+ if rev == wdirrev:
+ raise error.WdirUnsupported
+ raise
def node(self, rev):
- return self.index[rev][7]
+ try:
+ return self.index[rev][7]
+ except IndexError:
+ if rev == wdirrev:
+ raise error.WdirUnsupported
+ raise
# Derived from index values.
@@ -913,8 +948,8 @@
stop = []
stoprevs = set([self.rev(n) for n in stop])
startrev = self.rev(start)
- reachable = set((startrev,))
- heads = set((startrev,))
+ reachable = {startrev}
+ heads = {startrev}
parentrevs = self.parentrevs
for r in self.revs(start=startrev + 1):
@@ -1016,10 +1051,17 @@
pass
def _partialmatch(self, id):
+ maybewdir = wdirhex.startswith(id)
try:
partial = self.index.partialmatch(id)
if partial and self.hasnode(partial):
+ if maybewdir:
+ # single 'ff...' match in radix tree, ambiguous with wdir
+ raise RevlogError
return partial
+ if maybewdir:
+ # no 'ff...' match in radix tree, wdir identified
+ raise error.WdirUnsupported
return None
except RevlogError:
# parsers.c radix tree lookup gave multiple matches
@@ -1044,11 +1086,13 @@
nl = [n for n in nl if hex(n).startswith(id) and
self.hasnode(n)]
if len(nl) > 0:
- if len(nl) == 1:
+ if len(nl) == 1 and not maybewdir:
self._pcache[id] = nl[0]
return nl[0]
raise LookupError(id, self.indexfile,
_('ambiguous identifier'))
+ if maybewdir:
+ raise error.WdirUnsupported
return None
except TypeError:
pass
@@ -1075,7 +1119,7 @@
p1, p2 = self.parents(node)
return hash(text, p1, p2) != node
- def _addchunk(self, offset, data):
+ def _cachesegment(self, offset, data):
"""Add a segment to the revlog cache.
Accepts an absolute offset and the data that is at that location.
@@ -1087,7 +1131,7 @@
else:
self._chunkcache = offset, data
- def _loadchunk(self, offset, length, df=None):
+ def _readsegment(self, offset, length, df=None):
"""Load a segment of raw data from the revlog.
Accepts an absolute offset, length to read, and an optional existing
@@ -1118,12 +1162,12 @@
d = df.read(reallength)
if closehandle:
df.close()
- self._addchunk(realoffset, d)
+ self._cachesegment(realoffset, d)
if offset != realoffset or reallength != length:
return util.buffer(d, offset - realoffset, length)
return d
- def _getchunk(self, offset, length, df=None):
+ def _getsegment(self, offset, length, df=None):
"""Obtain a segment of raw data from the revlog.
Accepts an absolute offset, length of bytes to obtain, and an
@@ -1145,9 +1189,9 @@
return d # avoid a copy
return util.buffer(d, cachestart, cacheend - cachestart)
- return self._loadchunk(offset, length, df=df)
+ return self._readsegment(offset, length, df=df)
- def _chunkraw(self, startrev, endrev, df=None):
+ def _getsegmentforrevs(self, startrev, endrev, df=None):
"""Obtain a segment of raw data corresponding to a range of revisions.
Accepts the start and end revisions and an optional already-open
@@ -1179,7 +1223,7 @@
end += (endrev + 1) * self._io.size
length = end - start
- return start, self._getchunk(start, length, df=df)
+ return start, self._getsegment(start, length, df=df)
def _chunk(self, rev, df=None):
"""Obtain a single decompressed chunk for a revision.
@@ -1190,7 +1234,7 @@
Returns a str holding uncompressed data for the requested revision.
"""
- return self.decompress(self._chunkraw(rev, rev, df=df)[1])
+ return self.decompress(self._getsegmentforrevs(rev, rev, df=df)[1])
def _chunks(self, revs, df=None):
"""Obtain decompressed chunks for the specified revisions.
@@ -1217,7 +1261,7 @@
ladd = l.append
try:
- offset, data = self._chunkraw(revs[0], revs[-1], df=df)
+ offset, data = self._getsegmentforrevs(revs[0], revs[-1], df=df)
except OverflowError:
# issue4215 - we can't cache a run of chunks greater than
# 2G on Windows
@@ -1359,6 +1403,9 @@
Note: If the ``raw`` argument is set, it has precedence over the
operation and will only update the value of ``validatehash``.
"""
+ # fast path: no flag processors will run
+ if flags == 0:
+ return text, True
if not operation in ('read', 'write'):
raise ProgrammingError(_("invalid '%s' operation ") % (operation))
# Check all flags are known.
@@ -1443,13 +1490,13 @@
df = self.opener(self.datafile, 'w')
try:
for r in self:
- df.write(self._chunkraw(r, r)[1])
+ df.write(self._getsegmentforrevs(r, r)[1])
finally:
df.close()
fp = self.opener(self.indexfile, 'w', atomictemp=True,
checkambig=self._checkambig)
- self.version &= ~(REVLOGNGINLINEDATA)
+ self.version &= ~FLAG_INLINE_DATA
self._inline = False
for i in self:
e = self._io.packentry(self.index[i], self.node, self.version, i)
@@ -1502,6 +1549,15 @@
if validatehash:
self.checkhash(rawtext, node, p1=p1, p2=p2)
+ return self.addrawrevision(rawtext, transaction, link, p1, p2, node,
+ flags, cachedelta=cachedelta)
+
+ def addrawrevision(self, rawtext, transaction, link, p1, p2, node, flags,
+ cachedelta=None):
+ """add a raw revision with known flags, node and parents
+ useful when reusing a revision not stored in this revlog (ex: received
+ over wire, or read from an external bundle).
+ """
dfh = None
if not self._inline:
dfh = self.opener(self.datafile, "a+")
@@ -1798,9 +1854,7 @@
this revlog and the node that was added.
"""
- # track the base of the current delta log
- content = []
- node = None
+ nodes = []
r = len(self)
end = 0
@@ -1831,7 +1885,7 @@
delta = chunkdata['delta']
flags = chunkdata['flags'] or REVIDX_DEFAULT_FLAGS
- content.append(node)
+ nodes.append(node)
link = linkmapper(cs)
if node in self.nodemap:
@@ -1890,7 +1944,7 @@
dfh.close()
ifh.close()
- return content
+ return nodes
def iscensored(self, rev):
"""Check if a file revision is censored."""
@@ -2027,7 +2081,7 @@
DELTAREUSESAMEREVS = 'samerevs'
DELTAREUSENEVER = 'never'
- DELTAREUSEALL = set(['always', 'samerevs', 'never'])
+ DELTAREUSEALL = {'always', 'samerevs', 'never'}
def clone(self, tr, destrevlog, addrevisioncb=None,
deltareuse=DELTAREUSESAMEREVS, aggressivemergedeltas=None):
--- a/mercurial/revset.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/revset.py Tue Jun 20 16:33:46 2017 -0400
@@ -7,11 +7,11 @@
from __future__ import absolute_import
-import heapq
import re
from .i18n import _
from . import (
+ dagop,
destutil,
encoding,
error,
@@ -24,6 +24,7 @@
registrar,
repoview,
revsetlang,
+ scmutil,
smartset,
util,
)
@@ -48,123 +49,6 @@
spanset = smartset.spanset
fullreposet = smartset.fullreposet
-def _revancestors(repo, revs, followfirst):
- """Like revlog.ancestors(), but supports followfirst."""
- if followfirst:
- cut = 1
- else:
- cut = None
- cl = repo.changelog
-
- def iterate():
- revs.sort(reverse=True)
- irevs = iter(revs)
- h = []
-
- inputrev = next(irevs, None)
- if inputrev is not None:
- heapq.heappush(h, -inputrev)
-
- seen = set()
- while h:
- current = -heapq.heappop(h)
- if current == inputrev:
- inputrev = next(irevs, None)
- if inputrev is not None:
- heapq.heappush(h, -inputrev)
- if current not in seen:
- seen.add(current)
- yield current
- for parent in cl.parentrevs(current)[:cut]:
- if parent != node.nullrev:
- heapq.heappush(h, -parent)
-
- return generatorset(iterate(), iterasc=False)
-
-def _revdescendants(repo, revs, followfirst):
- """Like revlog.descendants() but supports followfirst."""
- if followfirst:
- cut = 1
- else:
- cut = None
-
- def iterate():
- cl = repo.changelog
- # XXX this should be 'parentset.min()' assuming 'parentset' is a
- # smartset (and if it is not, it should.)
- first = min(revs)
- nullrev = node.nullrev
- if first == nullrev:
- # Are there nodes with a null first parent and a non-null
- # second one? Maybe. Do we care? Probably not.
- for i in cl:
- yield i
- else:
- seen = set(revs)
- for i in cl.revs(first + 1):
- for x in cl.parentrevs(i)[:cut]:
- if x != nullrev and x in seen:
- seen.add(i)
- yield i
- break
-
- return generatorset(iterate(), iterasc=True)
-
-def _reachablerootspure(repo, minroot, roots, heads, includepath):
- """return (heads(::<roots> and ::<heads>))
-
- If includepath is True, return (<roots>::<heads>)."""
- if not roots:
- return []
- parentrevs = repo.changelog.parentrevs
- roots = set(roots)
- visit = list(heads)
- reachable = set()
- seen = {}
- # prefetch all the things! (because python is slow)
- reached = reachable.add
- dovisit = visit.append
- nextvisit = visit.pop
- # open-code the post-order traversal due to the tiny size of
- # sys.getrecursionlimit()
- while visit:
- rev = nextvisit()
- if rev in roots:
- reached(rev)
- if not includepath:
- continue
- parents = parentrevs(rev)
- seen[rev] = parents
- for parent in parents:
- if parent >= minroot and parent not in seen:
- dovisit(parent)
- if not reachable:
- return baseset()
- if not includepath:
- return reachable
- for rev in sorted(seen):
- for parent in seen[rev]:
- if parent in reachable:
- reached(rev)
- return reachable
-
-def reachableroots(repo, roots, heads, includepath=False):
- """return (heads(::<roots> and ::<heads>))
-
- If includepath is True, return (<roots>::<heads>)."""
- if not roots:
- return baseset()
- minroot = roots.min()
- roots = list(roots)
- heads = list(heads)
- try:
- revs = repo.changelog.reachableroots(minroot, heads, roots, includepath)
- except AttributeError:
- revs = _reachablerootspure(repo, minroot, roots, heads, includepath)
- revs = baseset(revs)
- revs.sort()
- return revs
-
# helpers
def getset(repo, subset, x):
@@ -185,7 +69,7 @@
# operator methods
def stringset(repo, subset, x):
- x = repo[x].rev()
+ x = scmutil.intrev(repo[x])
if (x in subset
or x == node.nullrev and isinstance(subset, fullreposet)):
return baseset([x])
@@ -236,8 +120,8 @@
def dagrange(repo, subset, x, y, order):
r = fullreposet(repo)
- xs = reachableroots(repo, getset(repo, r, x), getset(repo, r, y),
- includepath=True)
+ xs = dagop.reachableroots(repo, getset(repo, r, x), getset(repo, r, y),
+ includepath=True)
return subset & xs
def andset(repo, subset, x, y, order):
@@ -358,14 +242,19 @@
heads = getset(repo, fullreposet(repo), x)
if not heads:
return baseset()
- s = _revancestors(repo, heads, followfirst)
+ s = dagop.revancestors(repo, heads, followfirst)
return subset & s
@predicate('ancestors(set)', safe=True)
def ancestors(repo, subset, x):
- """Changesets that are ancestors of a changeset in set.
+ """Changesets that are ancestors of changesets in set, including the
+ given changesets themselves.
"""
- return _ancestors(repo, subset, x)
+ args = getargsdict(x, 'ancestors', 'set')
+ if 'set' not in args:
+ # i18n: "ancestors" is a keyword
+ raise error.ParseError(_('ancestors takes at least 1 argument'))
+ return _ancestors(repo, subset, args['set'])
@predicate('_firstancestors', safe=True)
def _firstancestors(repo, subset, x):
@@ -373,17 +262,41 @@
# Like ``ancestors(set)`` but follows only the first parents.
return _ancestors(repo, subset, x, followfirst=True)
+def _childrenspec(repo, subset, x, n, order):
+ """Changesets that are the Nth child of a changeset
+ in set.
+ """
+ cs = set()
+ for r in getset(repo, fullreposet(repo), x):
+ for i in range(n):
+ c = repo[r].children()
+ if len(c) == 0:
+ break
+ if len(c) > 1:
+ raise error.RepoLookupError(
+ _("revision in set has more than one child"))
+ r = c[0].rev()
+ else:
+ cs.add(r)
+ return subset & cs
+
def ancestorspec(repo, subset, x, n, order):
"""``set~n``
Changesets that are the Nth ancestor (first parents only) of a changeset
in set.
"""
n = getinteger(n, _("~ expects a number"))
+ if n < 0:
+ # children lookup
+ return _childrenspec(repo, subset, x, -n, order)
ps = set()
cl = repo.changelog
for r in getset(repo, fullreposet(repo), x):
for i in range(n):
- r = cl.parentrevs(r)[0]
+ try:
+ r = cl.parentrevs(r)[0]
+ except error.WdirUnsupported:
+ r = repo[r].parents()[0].rev()
ps.add(r)
return subset & ps
@@ -451,9 +364,8 @@
for bmrev in matchrevs:
bms.add(repo[bmrev].rev())
else:
- bms = set([repo[r].rev()
- for r in repo._bookmarks.values()])
- bms -= set([node.nullrev])
+ bms = {repo[r].rev() for r in repo._bookmarks.values()}
+ bms -= {node.nullrev}
return subset & bms
@predicate('branch(string or set)', safe=True)
@@ -466,6 +378,11 @@
:hg:`help revisions.patterns`.
"""
getbi = repo.revbranchcache().branchinfo
+ def getbranch(r):
+ try:
+ return getbi(r)[0]
+ except error.WdirUnsupported:
+ return repo[r].branch()
try:
b = getstring(x, '')
@@ -478,21 +395,21 @@
# note: falls through to the revspec case if no branch with
# this name exists and pattern kind is not specified explicitly
if pattern in repo.branchmap():
- return subset.filter(lambda r: matcher(getbi(r)[0]),
+ return subset.filter(lambda r: matcher(getbranch(r)),
condrepr=('<branch %r>', b))
if b.startswith('literal:'):
raise error.RepoLookupError(_("branch '%s' does not exist")
% pattern)
else:
- return subset.filter(lambda r: matcher(getbi(r)[0]),
+ return subset.filter(lambda r: matcher(getbranch(r)),
condrepr=('<branch %r>', b))
s = getset(repo, fullreposet(repo), x)
b = set()
for r in s:
- b.add(getbi(r)[0])
+ b.add(getbranch(r))
c = s.__contains__
- return subset.filter(lambda r: c(r) or getbi(r)[0] in b,
+ return subset.filter(lambda r: c(r) or getbranch(r) in b,
condrepr=lambda: '<branch %r>' % sorted(b))
@predicate('bumped()', safe=True)
@@ -663,7 +580,7 @@
roots = getset(repo, fullreposet(repo), x)
if not roots:
return baseset()
- s = _revdescendants(repo, roots, followfirst)
+ s = dagop.revdescendants(repo, roots, followfirst)
# Both sets need to be ascending in order to lazily return the union
# in the correct order.
@@ -680,9 +597,14 @@
@predicate('descendants(set)', safe=True)
def descendants(repo, subset, x):
- """Changesets which are descendants of changesets in set.
+ """Changesets which are descendants of changesets in set, including the
+ given changesets themselves.
"""
- return _descendants(repo, subset, x)
+ args = getargsdict(x, 'descendants', 'set')
+ if 'set' not in args:
+ # i18n: "descendants" is a keyword
+ raise error.ParseError(_('descendants takes at least 1 argument'))
+ return _descendants(repo, subset, args['set'])
@predicate('_firstdescendants', safe=True)
def _firstdescendants(repo, subset, x):
@@ -850,11 +772,11 @@
return subset & s
-@predicate('first(set, [n])', safe=True)
-def first(repo, subset, x):
+@predicate('first(set, [n])', safe=True, takeorder=True)
+def first(repo, subset, x, order):
"""An alias for limit().
"""
- return limit(repo, subset, x)
+ return limit(repo, subset, x, order)
def _follow(repo, subset, x, name, followfirst=False):
l = getargs(x, 0, 2, _("%s takes no arguments or a pattern "
@@ -882,7 +804,7 @@
# include the revision responsible for the most recent version
s.add(fctx.introrev())
else:
- s = _revancestors(repo, baseset([c.rev()]), followfirst)
+ s = dagop.revancestors(repo, baseset([c.rev()]), followfirst)
return subset & s
@@ -915,8 +837,6 @@
descendants of 'startrev' are returned though renames are (currently) not
followed in this direction.
"""
- from . import context # avoid circular import issues
-
args = getargsdict(x, 'followlines', 'file *lines startrev descend')
if len(args['lines']) != 1:
raise error.ParseError(_("followlines requires a line range"))
@@ -956,12 +876,12 @@
if descend:
rs = generatorset(
(c.rev() for c, _linerange
- in context.blockdescendants(fctx, fromline, toline)),
+ in dagop.blockdescendants(fctx, fromline, toline)),
iterasc=True)
else:
rs = generatorset(
(c.rev() for c, _linerange
- in context.blockancestors(fctx, fromline, toline)),
+ in dagop.blockancestors(fctx, fromline, toline)),
iterasc=False)
return subset & rs
@@ -1118,8 +1038,8 @@
return subset.filter(matches, condrepr=('<keyword %r>', kw))
-@predicate('limit(set[, n[, offset]])', safe=True)
-def limit(repo, subset, x):
+@predicate('limit(set[, n[, offset]])', safe=True, takeorder=True)
+def limit(repo, subset, x, order):
"""First n members of set, defaulting to 1, starting from offset.
"""
args = getargsdict(x, 'limit', 'set n offset')
@@ -1128,28 +1048,20 @@
raise error.ParseError(_("limit requires one to three arguments"))
# i18n: "limit" is a keyword
lim = getinteger(args.get('n'), _("limit expects a number"), default=1)
+ if lim < 0:
+ raise error.ParseError(_("negative number to select"))
# i18n: "limit" is a keyword
ofs = getinteger(args.get('offset'), _("limit expects a number"), default=0)
if ofs < 0:
raise error.ParseError(_("negative offset"))
os = getset(repo, fullreposet(repo), args['set'])
- result = []
- it = iter(os)
- for x in xrange(ofs):
- y = next(it, None)
- if y is None:
- break
- for x in xrange(lim):
- y = next(it, None)
- if y is None:
- break
- elif y in subset:
- result.append(y)
- return baseset(result, datarepr=('<limit n=%d, offset=%d, %r, %r>',
- lim, ofs, subset, os))
+ ls = os.slice(ofs, ofs + lim)
+ if order == followorder and lim > 1:
+ return subset & ls
+ return ls & subset
-@predicate('last(set, [n])', safe=True)
-def last(repo, subset, x):
+@predicate('last(set, [n])', safe=True, takeorder=True)
+def last(repo, subset, x, order):
"""Last n members of set, defaulting to 1.
"""
# i18n: "last" is a keyword
@@ -1158,17 +1070,15 @@
if len(l) == 2:
# i18n: "last" is a keyword
lim = getinteger(l[1], _("last expects a number"))
+ if lim < 0:
+ raise error.ParseError(_("negative number to select"))
os = getset(repo, fullreposet(repo), l[0])
os.reverse()
- result = []
- it = iter(os)
- for x in xrange(lim):
- y = next(it, None)
- if y is None:
- break
- elif y in subset:
- result.append(y)
- return baseset(result, datarepr=('<last n=%d, %r, %r>', lim, subset, os))
+ ls = os.slice(0, lim)
+ if order == followorder and lim > 1:
+ return subset & ls
+ ls.reverse()
+ return ls & subset
@predicate('max(set)', safe=True)
def maxrev(repo, subset, x):
@@ -1276,7 +1186,7 @@
if name not in ns.deprecated:
names.update(repo[n].rev() for n in ns.nodes(repo, name))
- names -= set([node.nullrev])
+ names -= {node.nullrev}
return subset & names
@predicate('id(string)', safe=True)
@@ -1290,13 +1200,18 @@
if len(n) == 40:
try:
rn = repo.changelog.rev(node.bin(n))
+ except error.WdirUnsupported:
+ rn = node.wdirrev
except (LookupError, TypeError):
rn = None
else:
rn = None
- pm = repo.changelog._partialmatch(n)
- if pm is not None:
- rn = repo.changelog.rev(pm)
+ try:
+ pm = repo.changelog._partialmatch(n)
+ if pm is not None:
+ rn = repo.changelog.rev(pm)
+ except error.WdirUnsupported:
+ rn = node.wdirrev
if rn is None:
return baseset()
@@ -1326,7 +1241,7 @@
if not include:
return baseset()
- descendants = set(_revdescendants(repo, include, False))
+ descendants = set(dagop.revdescendants(repo, include, False))
exclude = [rev for rev in cl.headrevs()
if not rev in descendants and not rev in include]
else:
@@ -1363,8 +1278,8 @@
return src
src = prev
- o = set([_firstsrc(r) for r in dests])
- o -= set([None])
+ o = {_firstsrc(r) for r in dests}
+ o -= {None}
# XXX we should turn this into a baseset instead of a set, smartset may do
# some optimizations from the fact this is a baseset.
return subset & o
@@ -1393,7 +1308,7 @@
outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs)
repo.ui.popbuffer()
cl = repo.changelog
- o = set([cl.rev(r) for r in outgoing.missing])
+ o = {cl.rev(r) for r in outgoing.missing}
return subset & o
@predicate('p1([set])', safe=True)
@@ -1409,8 +1324,11 @@
ps = set()
cl = repo.changelog
for r in getset(repo, fullreposet(repo), x):
- ps.add(cl.parentrevs(r)[0])
- ps -= set([node.nullrev])
+ try:
+ ps.add(cl.parentrevs(r)[0])
+ except error.WdirUnsupported:
+ ps.add(repo[r].parents()[0].rev())
+ ps -= {node.nullrev}
# XXX we should turn this into a baseset instead of a set, smartset may do
# some optimizations from the fact this is a baseset.
return subset & ps
@@ -1432,8 +1350,13 @@
ps = set()
cl = repo.changelog
for r in getset(repo, fullreposet(repo), x):
- ps.add(cl.parentrevs(r)[1])
- ps -= set([node.nullrev])
+ try:
+ ps.add(cl.parentrevs(r)[1])
+ except error.WdirUnsupported:
+ parents = repo[r].parents()
+ if len(parents) == 2:
+ ps.add(parents[1])
+ ps -= {node.nullrev}
# XXX we should turn this into a baseset instead of a set, smartset may do
# some optimizations from the fact this is a baseset.
return subset & ps
@@ -1454,11 +1377,11 @@
up = ps.update
parentrevs = cl.parentrevs
for r in getset(repo, fullreposet(repo), x):
- if r == node.wdirrev:
+ try:
+ up(parentrevs(r))
+ except error.WdirUnsupported:
up(p.rev() for p in repo[r].parents())
- else:
- up(parentrevs(r))
- ps -= set([node.nullrev])
+ ps -= {node.nullrev}
return subset & ps
def _phase(repo, subset, *targets):
@@ -1500,11 +1423,19 @@
if n == 0:
ps.add(r)
elif n == 1:
- ps.add(cl.parentrevs(r)[0])
- elif n == 2:
- parents = cl.parentrevs(r)
- if parents[1] != node.nullrev:
- ps.add(parents[1])
+ try:
+ ps.add(cl.parentrevs(r)[0])
+ except error.WdirUnsupported:
+ ps.add(repo[r].parents()[0].rev())
+ else:
+ try:
+ parents = cl.parentrevs(r)
+ if parents[1] != node.nullrev:
+ ps.add(parents[1])
+ except error.WdirUnsupported:
+ parents = repo[r].parents()
+ if len(parents) == 2:
+ ps.add(parents[1].rev())
return subset & ps
@predicate('present(set)', safe=True)
@@ -1597,7 +1528,7 @@
except (TypeError, ValueError):
# i18n: "rev" is a keyword
raise error.ParseError(_("rev expects a number"))
- if l not in repo.changelog and l != node.nullrev:
+ if l not in repo.changelog and l not in (node.nullrev, node.wdirrev):
return baseset()
return subset & baseset([l])
@@ -1812,7 +1743,8 @@
firstbranch = ()
if 'topo.firstbranch' in opts:
firstbranch = getset(repo, subset, opts['topo.firstbranch'])
- revs = baseset(_toposort(revs, repo.changelog.parentrevs, firstbranch),
+ revs = baseset(dagop.toposort(revs, repo.changelog.parentrevs,
+ firstbranch),
istopo=True)
if keyflags[0][1]:
revs.reverse()
@@ -1824,204 +1756,6 @@
ctxs.sort(key=_sortkeyfuncs[k], reverse=reverse)
return baseset([c.rev() for c in ctxs])
-def _toposort(revs, parentsfunc, firstbranch=()):
- """Yield revisions from heads to roots one (topo) branch at a time.
-
- This function aims to be used by a graph generator that wishes to minimize
- the number of parallel branches and their interleaving.
-
- Example iteration order (numbers show the "true" order in a changelog):
-
- o 4
- |
- o 1
- |
- | o 3
- | |
- | o 2
- |/
- o 0
-
- Note that the ancestors of merges are understood by the current
- algorithm to be on the same branch. This means no reordering will
- occur behind a merge.
- """
-
- ### Quick summary of the algorithm
- #
- # This function is based around a "retention" principle. We keep revisions
- # in memory until we are ready to emit a whole branch that immediately
- # "merges" into an existing one. This reduces the number of parallel
- # branches with interleaved revisions.
- #
- # During iteration revs are split into two groups:
- # A) revision already emitted
- # B) revision in "retention". They are stored as different subgroups.
- #
- # for each REV, we do the following logic:
- #
- # 1) if REV is a parent of (A), we will emit it. If there is a
- # retention group ((B) above) that is blocked on REV being
- # available, we emit all the revisions out of that retention
- # group first.
- #
- # 2) else, we'll search for a subgroup in (B) awaiting for REV to be
- # available, if such subgroup exist, we add REV to it and the subgroup is
- # now awaiting for REV.parents() to be available.
- #
- # 3) finally if no such group existed in (B), we create a new subgroup.
- #
- #
- # To bootstrap the algorithm, we emit the tipmost revision (which
- # puts it in group (A) from above).
-
- revs.sort(reverse=True)
-
- # Set of parents of revision that have been emitted. They can be considered
- # unblocked as the graph generator is already aware of them so there is no
- # need to delay the revisions that reference them.
- #
- # If someone wants to prioritize a branch over the others, pre-filling this
- # set will force all other branches to wait until this branch is ready to be
- # emitted.
- unblocked = set(firstbranch)
-
- # list of groups waiting to be displayed, each group is defined by:
- #
- # (revs: lists of revs waiting to be displayed,
- # blocked: set of that cannot be displayed before those in 'revs')
- #
- # The second value ('blocked') correspond to parents of any revision in the
- # group ('revs') that is not itself contained in the group. The main idea
- # of this algorithm is to delay as much as possible the emission of any
- # revision. This means waiting for the moment we are about to display
- # these parents to display the revs in a group.
- #
- # This first implementation is smart until it encounters a merge: it will
- # emit revs as soon as any parent is about to be emitted and can grow an
- # arbitrary number of revs in 'blocked'. In practice this mean we properly
- # retains new branches but gives up on any special ordering for ancestors
- # of merges. The implementation can be improved to handle this better.
- #
- # The first subgroup is special. It corresponds to all the revision that
- # were already emitted. The 'revs' lists is expected to be empty and the
- # 'blocked' set contains the parents revisions of already emitted revision.
- #
- # You could pre-seed the <parents> set of groups[0] to a specific
- # changesets to select what the first emitted branch should be.
- groups = [([], unblocked)]
- pendingheap = []
- pendingset = set()
-
- heapq.heapify(pendingheap)
- heappop = heapq.heappop
- heappush = heapq.heappush
- for currentrev in revs:
- # Heap works with smallest element, we want highest so we invert
- if currentrev not in pendingset:
- heappush(pendingheap, -currentrev)
- pendingset.add(currentrev)
- # iterates on pending rev until after the current rev have been
- # processed.
- rev = None
- while rev != currentrev:
- rev = -heappop(pendingheap)
- pendingset.remove(rev)
-
- # Seek for a subgroup blocked, waiting for the current revision.
- matching = [i for i, g in enumerate(groups) if rev in g[1]]
-
- if matching:
- # The main idea is to gather together all sets that are blocked
- # on the same revision.
- #
- # Groups are merged when a common blocking ancestor is
- # observed. For example, given two groups:
- #
- # revs [5, 4] waiting for 1
- # revs [3, 2] waiting for 1
- #
- # These two groups will be merged when we process
- # 1. In theory, we could have merged the groups when
- # we added 2 to the group it is now in (we could have
- # noticed the groups were both blocked on 1 then), but
- # the way it works now makes the algorithm simpler.
- #
- # We also always keep the oldest subgroup first. We can
- # probably improve the behavior by having the longest set
- # first. That way, graph algorithms could minimise the length
- # of parallel lines their drawing. This is currently not done.
- targetidx = matching.pop(0)
- trevs, tparents = groups[targetidx]
- for i in matching:
- gr = groups[i]
- trevs.extend(gr[0])
- tparents |= gr[1]
- # delete all merged subgroups (except the one we kept)
- # (starting from the last subgroup for performance and
- # sanity reasons)
- for i in reversed(matching):
- del groups[i]
- else:
- # This is a new head. We create a new subgroup for it.
- targetidx = len(groups)
- groups.append(([], set([rev])))
-
- gr = groups[targetidx]
-
- # We now add the current nodes to this subgroups. This is done
- # after the subgroup merging because all elements from a subgroup
- # that relied on this rev must precede it.
- #
- # we also update the <parents> set to include the parents of the
- # new nodes.
- if rev == currentrev: # only display stuff in rev
- gr[0].append(rev)
- gr[1].remove(rev)
- parents = [p for p in parentsfunc(rev) if p > node.nullrev]
- gr[1].update(parents)
- for p in parents:
- if p not in pendingset:
- pendingset.add(p)
- heappush(pendingheap, -p)
-
- # Look for a subgroup to display
- #
- # When unblocked is empty (if clause), we were not waiting for any
- # revisions during the first iteration (if no priority was given) or
- # if we emitted a whole disconnected set of the graph (reached a
- # root). In that case we arbitrarily take the oldest known
- # subgroup. The heuristic could probably be better.
- #
- # Otherwise (elif clause) if the subgroup is blocked on
- # a revision we just emitted, we can safely emit it as
- # well.
- if not unblocked:
- if len(groups) > 1: # display other subset
- targetidx = 1
- gr = groups[1]
- elif not gr[1] & unblocked:
- gr = None
-
- if gr is not None:
- # update the set of awaited revisions with the one from the
- # subgroup
- unblocked |= gr[1]
- # output all revisions in the subgroup
- for r in gr[0]:
- yield r
- # delete the subgroup that you just output
- # unless it is groups[0] in which case you just empty it.
- if targetidx:
- del groups[targetidx]
- else:
- gr[0][:] = []
- # Check if we have some subgroup waiting for revisions we are not going to
- # iterate over
- for g in groups:
- for r in g[0]:
- yield r
-
@predicate('subrepo([pattern])')
def subrepo(repo, subset, x):
"""Changesets that add, modify or remove the given subrepo. If no subrepo
@@ -2098,11 +1832,11 @@
if tn is None:
raise error.RepoLookupError(_("tag '%s' does not exist")
% pattern)
- s = set([repo[tn].rev()])
+ s = {repo[tn].rev()}
else:
- s = set([cl.rev(n) for t, n in repo.tagslist() if matcher(t)])
+ s = {cl.rev(n) for t, n in repo.tagslist() if matcher(t)}
else:
- s = set([cl.rev(n) for t, n in repo.tagslist() if t != 'tip'])
+ s = {cl.rev(n) for t, n in repo.tagslist() if t != 'tip'}
return subset & s
@predicate('tagged', safe=True)
@@ -2128,7 +1862,7 @@
"""
return author(repo, subset, x)
-@predicate('wdir', safe=True)
+@predicate('wdir()', safe=True)
def wdir(repo, subset, x):
"""Working directory. (EXPERIMENTAL)"""
# i18n: "wdir" is a keyword
--- a/mercurial/revsetlang.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/revsetlang.py Tue Jun 20 16:33:46 2017 -0400
@@ -44,9 +44,9 @@
"end": (0, None, None, None, None),
}
-keywords = set(['and', 'or', 'not'])
+keywords = {'and', 'or', 'not'}
-_quoteletters = set(['"', "'"])
+_quoteletters = {'"', "'"}
_simpleopletters = set(pycompat.iterbytestr("():=,-|&+!~^%"))
# default set of valid characters for the initial letter of symbols
@@ -236,6 +236,25 @@
return parser.buildargsdict(getlist(x), funcname, parser.splitargspec(keys),
keyvaluenode='keyvalue', keynode='symbol')
+def _isnamedfunc(x, funcname):
+ """Check if given tree matches named function"""
+ return x and x[0] == 'func' and getsymbol(x[1]) == funcname
+
+def _isposargs(x, n):
+ """Check if given tree is n-length list of positional arguments"""
+ l = getlist(x)
+ return len(l) == n and all(y and y[0] != 'keyvalue' for y in l)
+
+def _matchnamedfunc(x, funcname):
+ """Return args tree if given tree matches named function; otherwise None
+
+ This can't be used for testing a nullary function since its args tree
+ is also None. Use _isnamedfunc() instead.
+ """
+ if not _isnamedfunc(x, funcname):
+ return
+ return x[2]
+
# Constants for ordering requirement, used in _analyze():
#
# If 'define', any nested functions and operations can change the ordering of
@@ -286,14 +305,10 @@
>>> f('ancestors(A)', 'not ancestors(B)')
('list', ('symbol', 'A'), ('symbol', 'B'))
"""
- if (revs is not None
- and revs[0] == 'func'
- and getsymbol(revs[1]) == 'ancestors'
- and bases is not None
- and bases[0] == 'not'
- and bases[1][0] == 'func'
- and getsymbol(bases[1][1]) == 'ancestors'):
- return ('list', revs[2], bases[1][2])
+ ta = _matchnamedfunc(revs, 'ancestors')
+ tb = bases and bases[0] == 'not' and _matchnamedfunc(bases[1], 'ancestors')
+ if _isposargs(ta, 1) and _isposargs(tb, 1):
+ return ('list', ta, tb)
def _fixops(x):
"""Rewrite raw parsed tree to resolve ambiguous syntax which cannot be
--- a/mercurial/scmposix.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/scmposix.py Tue Jun 20 16:33:46 2017 -0400
@@ -8,8 +8,8 @@
from . import (
encoding,
- osutil,
pycompat,
+ util,
)
# BSD 'more' escapes ANSI color sequences by default. This can be disabled by
@@ -23,7 +23,7 @@
rcdir = os.path.join(path, 'hgrc.d')
try:
rcs.extend([os.path.join(rcdir, f)
- for f, kind in osutil.listdir(rcdir)
+ for f, kind in util.listdir(rcdir)
if f.endswith(".rc")])
except OSError:
pass
--- a/mercurial/scmutil.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/scmutil.py Tue Jun 20 16:33:46 2017 -0400
@@ -15,7 +15,11 @@
import socket
from .i18n import _
-from .node import wdirrev
+from .node import (
+ wdirid,
+ wdirrev,
+)
+
from . import (
encoding,
error,
@@ -26,7 +30,6 @@
revsetlang,
similar,
util,
- vfs as vfsmod,
)
if pycompat.osname == 'nt':
@@ -121,10 +124,6 @@
secretlist = []
if excluded:
for n in excluded:
- if n not in repo:
- # discovery should not have included the filtered revision,
- # we have to explicitly exclude it until discovery is cleanup.
- continue
ctx = repo[n]
if ctx.phase() >= phases.secret and not ctx.extinct():
secretlist.append(n)
@@ -186,13 +185,13 @@
ui.warn(_("abort: file censored %s!\n") % inst)
except error.RevlogError as inst:
ui.warn(_("abort: %s!\n") % inst)
- except error.SignalInterrupt:
- ui.warn(_("killed!\n"))
except error.InterventionRequired as inst:
ui.warn("%s\n" % inst)
if inst.hint:
ui.warn(_("(%s)\n") % inst.hint)
return 1
+ except error.WdirUnsupported:
+ ui.warn(_("abort: working directory revision cannot be specified\n"))
except error.Abort as inst:
ui.warn(_("abort: %s\n") % inst)
if inst.hint:
@@ -215,7 +214,7 @@
reason = inst.reason
if isinstance(reason, unicode):
# SSLError of Python 2.7.9 contains a unicode
- reason = reason.encode(encoding.encoding, 'replace')
+ reason = encoding.unitolocal(reason)
ui.warn(_("abort: error: %s\n") % reason)
elif (util.safehasattr(inst, "args")
and inst.args and inst.args[0] == errno.EPIPE):
@@ -335,27 +334,6 @@
key = s.digest()
return key
-def _deprecated(old, new, func):
- msg = ('class at mercurial.scmutil.%s moved to mercurial.vfs.%s'
- % (old, new))
- def wrapper(*args, **kwargs):
- util.nouideprecwarn(msg, '4.2')
- return func(*args, **kwargs)
- return wrapper
-
-# compatibility layer since all 'vfs' code moved to 'mercurial.vfs'
-#
-# This is hard to instal deprecation warning to this since we do not have
-# access to a 'ui' object.
-opener = _deprecated('opener', 'vfs', vfsmod.vfs)
-vfs = _deprecated('vfs', 'vfs', vfsmod.vfs)
-filteropener = _deprecated('filteropener', 'filtervfs', vfsmod.filtervfs)
-filtervfs = _deprecated('filtervfs', 'filtervfs', vfsmod.filtervfs)
-abstractvfs = _deprecated('abstractvfs', 'abstractvfs', vfsmod.abstractvfs)
-readonlyvfs = _deprecated('readonlyvfs', 'readonlyvfs', vfsmod.readonlyvfs)
-auditvfs = _deprecated('auditvfs', 'auditvfs', vfsmod.auditvfs)
-checkambigatclosing = vfsmod.checkambigatclosing
-
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
'''yield every hg repository under path, always recursively.
The recurse flag will only control recursion into repo working dirs'''
@@ -404,9 +382,17 @@
newdirs.append(d)
dirs[:] = newdirs
-def intrev(rev):
- """Return integer for a given revision that can be used in comparison or
+def binnode(ctx):
+ """Return binary node id for a given basectx"""
+ node = ctx.node()
+ if node is None:
+ return wdirid
+ return node
+
+def intrev(ctx):
+ """Return integer for a given basectx that can be used in comparison or
arithmetic operation"""
+ rev = ctx.rev()
if rev is None:
return wdirrev
return rev
@@ -494,7 +480,7 @@
return parents
if repo.ui.debugflag:
return [parents[0], repo['null']]
- if parents[0].rev() >= intrev(ctx.rev()) - 1:
+ if parents[0].rev() >= intrev(ctx) - 1:
return []
return parents
@@ -945,25 +931,57 @@
Keys must be alphanumerics and start with a letter, values must not
contain '\n' characters"""
+ firstlinekey = '__firstline'
def __init__(self, vfs, path, keys=None):
self.vfs = vfs
self.path = path
- def read(self):
+ def read(self, firstlinenonkeyval=False):
+ """Read the contents of a simple key-value file
+
+ 'firstlinenonkeyval' indicates whether the first line of file should
+        be treated as a key-value pair or returned fully under the
+ __firstline key."""
lines = self.vfs.readlines(self.path)
+ d = {}
+ if firstlinenonkeyval:
+ if not lines:
+ e = _("empty simplekeyvalue file")
+ raise error.CorruptedState(e)
+ # we don't want to include '\n' in the __firstline
+ d[self.firstlinekey] = lines[0][:-1]
+ del lines[0]
+
try:
- d = dict(line[:-1].split('=', 1) for line in lines if line)
+ # the 'if line.strip()' part prevents us from failing on empty
+        # lines which only contain '\n' and therefore are not skipped
+ # by 'if line'
+ updatedict = dict(line[:-1].split('=', 1) for line in lines
+ if line.strip())
+ if self.firstlinekey in updatedict:
+ e = _("%r can't be used as a key")
+ raise error.CorruptedState(e % self.firstlinekey)
+ d.update(updatedict)
except ValueError as e:
raise error.CorruptedState(str(e))
return d
- def write(self, data):
+ def write(self, data, firstline=None):
"""Write key=>value mapping to a file
data is a dict. Keys must be alphanumerical and start with a letter.
- Values must not contain newline characters."""
+ Values must not contain newline characters.
+
+ If 'firstline' is not None, it is written to file before
+ everything else, as it is, not in a key=value form"""
lines = []
+ if firstline is not None:
+ lines.append('%s\n' % firstline)
+
for k, v in data.items():
+ if k == self.firstlinekey:
+ e = "key name '%s' is reserved" % self.firstlinekey
+ raise error.ProgrammingError(e)
if not k[0].isalpha():
e = "keys must start with a letter in a key-value file"
raise error.ProgrammingError(e)
--- a/mercurial/scmwindows.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/scmwindows.py Tue Jun 20 16:33:46 2017 -0400
@@ -4,7 +4,6 @@
from . import (
encoding,
- osutil,
pycompat,
util,
win32,
@@ -29,7 +28,7 @@
# Use hgrc.d found in directory with hg.exe
progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
if os.path.isdir(progrcd):
- for f, kind in osutil.listdir(progrcd):
+ for f, kind in util.listdir(progrcd):
if f.endswith('.rc'):
rcpath.append(os.path.join(progrcd, f))
# else look for a system rcpath in the registry
@@ -42,7 +41,7 @@
if p.lower().endswith('mercurial.ini'):
rcpath.append(p)
elif os.path.isdir(p):
- for f, kind in osutil.listdir(p):
+ for f, kind in util.listdir(p):
if f.endswith('.rc'):
rcpath.append(os.path.join(p, f))
return rcpath
--- a/mercurial/server.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/server.py Tue Jun 20 16:33:46 2017 -0400
@@ -8,7 +8,6 @@
from __future__ import absolute_import
import os
-import sys
import tempfile
from .i18n import _
@@ -19,6 +18,7 @@
commandserver,
error,
hgweb,
+ pycompat,
util,
)
@@ -29,11 +29,11 @@
def writepid(pid):
if opts['pid_file']:
if appendpid:
- mode = 'a'
+ mode = 'ab'
else:
- mode = 'w'
+ mode = 'wb'
fp = open(opts['pid_file'], mode)
- fp.write(str(pid) + '\n')
+ fp.write('%d\n' % pid)
fp.close()
if opts['daemon'] and not opts['daemon_postexec']:
@@ -42,7 +42,7 @@
os.close(lockfd)
try:
if not runargs:
- runargs = util.hgcmd() + sys.argv[1:]
+ runargs = util.hgcmd() + pycompat.sysargv[1:]
runargs.append('--daemon-postexec=unlink:%s' % lockpath)
# Don't pass --cwd to the child process, because we've already
# changed directory.
@@ -123,7 +123,7 @@
if opts.get('port'):
opts['port'] = util.getport(opts.get('port'))
- alluis = set([ui])
+ alluis = {ui}
if repo:
baseui = repo.baseui
alluis.update([repo.baseui, repo.ui])
--- a/mercurial/setdiscovery.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/setdiscovery.py Tue Jun 20 16:33:46 2017 -0400
@@ -53,6 +53,7 @@
from . import (
dagutil,
error,
+ util,
)
def _updatesample(dag, nodes, sample, quicksamplesize=0):
@@ -136,6 +137,8 @@
'''Return a tuple (common, anyincoming, remoteheads) used to identify
missing nodes from or in remote.
'''
+ start = util.timer()
+
roundtrips = 0
cl = local.changelog
dag = dagutil.revlogdag(cl)
@@ -235,15 +238,21 @@
# common.bases can include nullrev, but our contract requires us to not
# return any heads in that case, so discard that
result.discard(nullrev)
+ elapsed = util.timer() - start
ui.progress(_('searching'), None)
- ui.debug("%d total queries\n" % roundtrips)
+ ui.debug("%d total queries in %.4fs\n" % (roundtrips, elapsed))
+ msg = ('found %d common and %d unknown server heads,'
+ ' %d roundtrips in %.4fs\n')
+ missing = set(result) - set(srvheads)
+ ui.log('discovery', msg, len(result), len(missing), roundtrips,
+ elapsed)
if not result and srvheadhashes != [nullid]:
if abortwhenunrelated:
raise error.Abort(_("repository is unrelated"))
else:
ui.warn(_("warning: repository is unrelated\n"))
- return (set([nullid]), True, srvheadhashes,)
+ return ({nullid}, True, srvheadhashes,)
anyincoming = (srvheadhashes != [nullid])
return dag.externalizeall(result), anyincoming, srvheadhashes
--- a/mercurial/similar.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/similar.py Tue Jun 20 16:33:46 2017 -0400
@@ -9,7 +9,6 @@
from .i18n import _
from . import (
- bdiff,
mdiff,
)
@@ -56,10 +55,10 @@
def _score(fctx, otherdata):
orig, lines = otherdata
text = fctx.data()
- # bdiff.blocks() returns blocks of matching lines
+ # mdiff.blocks() returns blocks of matching lines
# count the number of bytes in each
equal = 0
- matches = bdiff.blocks(text, orig)
+ matches = mdiff.blocks(text, orig)
for x1, x2, y1, y2 in matches:
for line in lines[y1:y2]:
equal += len(line)
--- a/mercurial/smartset.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/smartset.py Tue Jun 20 16:33:46 2017 -0400
@@ -8,6 +8,7 @@
from __future__ import absolute_import
from . import (
+ error,
util,
)
@@ -155,6 +156,28 @@
condition = util.cachefunc(condition)
return filteredset(self, condition, condrepr)
+ def slice(self, start, stop):
+ """Return new smartset that contains selected elements from this set"""
+ if start < 0 or stop < 0:
+ raise error.ProgrammingError('negative index not allowed')
+ return self._slice(start, stop)
+
+ def _slice(self, start, stop):
+        # subclasses may override this. start and stop must not be negative,
+ # but start > stop is allowed, which should be an empty set.
+ ys = []
+ it = iter(self)
+ for x in xrange(start):
+ y = next(it, None)
+ if y is None:
+ break
+ for x in xrange(stop - start):
+ y = next(it, None)
+ if y is None:
+ break
+ ys.append(y)
+ return baseset(ys, datarepr=('slice=%d:%d %r', start, stop, self))
+
class baseset(abstractsmartset):
"""Basic data structure that represents a revset and contains the basic
operation that it should be able to perform.
@@ -245,7 +268,7 @@
@util.propertycache
def _list(self):
# _list is only lazily constructed if we have _set
- assert '_set' in self.__dict__
+ assert r'_set' in self.__dict__
return list(self._set)
def __iter__(self):
@@ -349,6 +372,18 @@
def __sub__(self, other):
return self._fastsetop(other, '__sub__')
+ def _slice(self, start, stop):
+ # creating new list should be generally cheaper than iterating items
+ if self._ascending is None:
+ return baseset(self._list[start:stop], istopo=self._istopo)
+
+ data = self._asclist
+ if not self._ascending:
+ start, stop = max(len(data) - stop, 0), max(len(data) - start, 0)
+ s = baseset(data[start:stop], istopo=self._istopo)
+ s._ascending = self._ascending
+ return s
+
def __repr__(self):
d = {None: '', False: '-', True: '+'}[self._ascending]
s = _formatsetrepr(self._datarepr)
@@ -906,7 +941,22 @@
d = {False: '-', True: '+'}[self._ascending]
return '<%s%s>' % (type(self).__name__, d)
-class spanset(abstractsmartset):
+def spanset(repo, start=0, end=None):
+ """Create a spanset that represents a range of repository revisions
+
+ start: first revision included the set (default to 0)
+ end: first revision excluded (last+1) (default to len(repo))
+
+ Spanset will be descending if `end` < `start`.
+ """
+ if end is None:
+ end = len(repo)
+ ascending = start <= end
+ if not ascending:
+ start, end = end + 1, start + 1
+ return _spanset(start, end, ascending, repo.changelog.filteredrevs)
+
+class _spanset(abstractsmartset):
"""Duck type for baseset class which represents a range of revisions and
can work lazily and without having all the range in memory
@@ -916,23 +966,11 @@
- revision filtered with this repoview will be skipped.
"""
- def __init__(self, repo, start=0, end=None):
- """
- start: first revision included the set
- (default to 0)
- end: first revision excluded (last+1)
- (default to len(repo)
-
- Spanset will be descending if `end` < `start`.
- """
- if end is None:
- end = len(repo)
- self._ascending = start <= end
- if not self._ascending:
- start, end = end + 1, start +1
+ def __init__(self, start, end, ascending, hiddenrevs):
self._start = start
self._end = end
- self._hiddenrevs = repo.changelog.filteredrevs
+ self._ascending = ascending
+ self._hiddenrevs = hiddenrevs
def sort(self, reverse=False):
self._ascending = not reverse
@@ -1018,12 +1056,24 @@
return x
return None
+ def _slice(self, start, stop):
+ if self._hiddenrevs:
+            # unoptimized since all hidden revisions in range have to be scanned
+ return super(_spanset, self)._slice(start, stop)
+ if self._ascending:
+ x = min(self._start + start, self._end)
+ y = min(self._start + stop, self._end)
+ else:
+ x = max(self._end - stop, self._start)
+ y = max(self._end - start, self._start)
+ return _spanset(x, y, self._ascending, self._hiddenrevs)
+
def __repr__(self):
d = {False: '-', True: '+'}[self._ascending]
- return '<%s%s %d:%d>' % (type(self).__name__, d,
- self._start, self._end - 1)
+ return '<%s%s %d:%d>' % (type(self).__name__.lstrip('_'), d,
+ self._start, self._end)
-class fullreposet(spanset):
+class fullreposet(_spanset):
"""a set containing all revisions in the repo
This class exists to host special optimization and magic to handle virtual
@@ -1031,7 +1081,8 @@
"""
def __init__(self, repo):
- super(fullreposet, self).__init__(repo)
+ super(fullreposet, self).__init__(0, len(repo), True,
+ repo.changelog.filteredrevs)
def __and__(self, other):
"""As self contains the whole repo, all of the other set should also be
--- a/mercurial/sslutil.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/sslutil.py Tue Jun 20 16:33:46 2017 -0400
@@ -13,7 +13,6 @@
import os
import re
import ssl
-import sys
from .i18n import _
from . import (
@@ -30,17 +29,17 @@
# modern/secure or legacy/insecure. Many operations in this module have
# separate code paths depending on support in Python.
-configprotocols = set([
+configprotocols = {
'tls1.0',
'tls1.1',
'tls1.2',
-])
+}
hassni = getattr(ssl, 'HAS_SNI', False)
# TLS 1.1 and 1.2 may not be supported if the OpenSSL Python is compiled
# against doesn't support them.
-supportedprotocols = set(['tls1.0'])
+supportedprotocols = {'tls1.0'}
if util.safehasattr(ssl, 'PROTOCOL_TLSv1_1'):
supportedprotocols.add('tls1.1')
if util.safehasattr(ssl, 'PROTOCOL_TLSv1_2'):
@@ -58,9 +57,6 @@
# We implement SSLContext using the interface from the standard library.
class SSLContext(object):
- # ssl.wrap_socket gained the "ciphers" named argument in 2.7.
- _supportsciphers = sys.version_info >= (2, 7)
-
def __init__(self, protocol):
# From the public interface of SSLContext
self.protocol = protocol
@@ -92,13 +88,6 @@
self._cacerts = cafile
def set_ciphers(self, ciphers):
- if not self._supportsciphers:
- raise error.Abort(_('setting ciphers in [hostsecurity] is not '
- 'supported by this version of Python'),
- hint=_('remove the config option or run '
- 'Mercurial with a modern Python '
- 'version (preferred)'))
-
self._ciphers = ciphers
def wrap_socket(self, socket, server_hostname=None, server_side=False):
@@ -113,11 +102,9 @@
'cert_reqs': self.verify_mode,
'ssl_version': self.protocol,
'ca_certs': self._cacerts,
+ 'ciphers': self._ciphers,
}
- if self._supportsciphers:
- args['ciphers'] = self._ciphers
-
return ssl.wrap_socket(socket, **args)
def _hostsettings(ui, hostname):
@@ -309,7 +296,7 @@
# disable protocols via SSLContext.options and OP_NO_* constants.
# However, SSLContext.options doesn't work unless we have the
# full/real SSLContext available to us.
- if supportedprotocols == set(['tls1.0']):
+ if supportedprotocols == {'tls1.0'}:
if protocol != 'tls1.0':
raise error.Abort(_('current Python does not support protocol '
'setting %s') % protocol,
@@ -443,7 +430,7 @@
# is really old. (e.g. server doesn't support TLS 1.0+ or
# client doesn't support modern TLS versions introduced
# several years from when this comment was written).
- if supportedprotocols != set(['tls1.0']):
+ if supportedprotocols != {'tls1.0'}:
ui.warn(_(
'(could not communicate with %s using security '
'protocols %s; if you are using a modern Mercurial '
@@ -820,13 +807,11 @@
if settings['legacyfingerprint']:
ui.warn(_('(SHA-1 fingerprint for %s found in legacy '
'[hostfingerprints] section; '
- 'if you trust this fingerprint, set the '
- 'following config value in [hostsecurity] and '
- 'remove the old one from [hostfingerprints] '
- 'to upgrade to a more secure SHA-256 '
- 'fingerprint: '
- '%s:fingerprints=%s)\n') % (
- host, host, nicefingerprint))
+ 'if you trust this fingerprint, remove the old '
+ 'SHA-1 fingerprint from [hostfingerprints] and '
+ 'add the following entry to the new '
+ '[hostsecurity] section: %s:fingerprints=%s)\n') %
+ (host, host, nicefingerprint))
return
# Pinned fingerprint didn't match. This is a fatal error.
--- a/mercurial/statichttprepo.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/statichttprepo.py Tue Jun 20 16:33:46 2017 -0400
@@ -127,6 +127,7 @@
self._phasedefaults = []
self.names = namespaces.namespaces()
+ self.filtername = None
try:
requirements = scmutil.readrequires(self.vfs, self.supported)
--- a/mercurial/statprof.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/statprof.py Tue Jun 20 16:33:46 2017 -0400
@@ -126,14 +126,14 @@
__all__ = ['start', 'stop', 'reset', 'display', 'profile']
-skips = set(["util.py:check", "extensions.py:closure",
- "color.py:colorcmd", "dispatch.py:checkargs",
- "dispatch.py:<lambda>", "dispatch.py:_runcatch",
- "dispatch.py:_dispatch", "dispatch.py:_runcommand",
- "pager.py:pagecmd", "dispatch.py:run",
- "dispatch.py:dispatch", "dispatch.py:runcommand",
- "hg.py:<module>", "evolve.py:warnobserrors",
- ])
+skips = {"util.py:check", "extensions.py:closure",
+ "color.py:colorcmd", "dispatch.py:checkargs",
+ "dispatch.py:<lambda>", "dispatch.py:_runcatch",
+ "dispatch.py:_dispatch", "dispatch.py:_runcommand",
+ "pager.py:pagecmd", "dispatch.py:run",
+ "dispatch.py:dispatch", "dispatch.py:runcommand",
+ "hg.py:<module>", "evolve.py:warnobserrors",
+}
###########################################################################
## Utils
--- a/mercurial/store.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/store.py Tue Jun 20 16:33:46 2017 -0400
@@ -15,12 +15,14 @@
from .i18n import _
from . import (
error,
- parsers,
+ policy,
pycompat,
util,
vfs as vfsmod,
)
+parsers = policy.importmod(r'parsers')
+
# This avoids a collision between a file named foo and a dir named
# foo.i or foo.d
def _encodedir(path):
--- a/mercurial/streamclone.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/streamclone.py Tue Jun 20 16:33:46 2017 -0400
@@ -13,6 +13,7 @@
from . import (
branchmap,
error,
+ phases,
store,
util,
)
@@ -80,11 +81,21 @@
streamreqs = remote.capable('streamreqs')
# This is weird and shouldn't happen with modern servers.
if not streamreqs:
+ pullop.repo.ui.warn(_(
+ 'warning: stream clone requested but server has them '
+ 'disabled\n'))
return False, None
streamreqs = set(streamreqs.split(','))
# Server requires something we don't support. Bail.
- if streamreqs - repo.supportedformats:
+ missingreqs = streamreqs - repo.supportedformats
+ if missingreqs:
+ pullop.repo.ui.warn(_(
+ 'warning: stream clone requested but client is missing '
+ 'requirements: %s\n') % ', '.join(sorted(missingreqs)))
+ pullop.repo.ui.warn(
+ _('(see https://www.mercurial-scm.org/wiki/MissingRequirement '
+ 'for more information)\n'))
return False, None
requirements = streamreqs
@@ -152,9 +163,18 @@
repo.invalidate()
-def allowservergeneration(ui):
+def allowservergeneration(repo):
"""Whether streaming clones are allowed from the server."""
- return ui.configbool('server', 'uncompressed', True, untrusted=True)
+ if not repo.ui.configbool('server', 'uncompressed', True, untrusted=True):
+ return False
+
+ # The way stream clone works makes it impossible to hide secret changesets.
+ # So don't allow this by default.
+ secret = phases.hassecret(repo)
+ if secret:
+ return repo.ui.configbool('server', 'uncompressedallowsecret', False)
+
+ return True
# This is it's own function so extensions can override it.
def _walkstreamfiles(repo):
--- a/mercurial/subrepo.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/subrepo.py Tue Jun 20 16:33:46 2017 -0400
@@ -538,7 +538,7 @@
self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
return 1
- def cat(self, match, prefix, **opts):
+ def cat(self, match, fm, fntemplate, prefix, **opts):
return 1
def status(self, rev2, **opts):
@@ -767,10 +767,11 @@
dry_run, similarity)
@annotatesubrepoerror
- def cat(self, match, prefix, **opts):
+ def cat(self, match, fm, fntemplate, prefix, **opts):
rev = self._state[1]
ctx = self._repo[rev]
- return cmdutil.cat(self.ui, self._repo, ctx, match, prefix, **opts)
+ return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
+ prefix, **opts)
@annotatesubrepoerror
def status(self, rev2, **opts):
@@ -1771,7 +1772,7 @@
if exact:
rejected.append(f)
continue
- if not opts.get('dry_run'):
+ if not opts.get(r'dry_run'):
self._gitcommand(command + [f])
for f in rejected:
@@ -1832,7 +1833,7 @@
@annotatesubrepoerror
- def cat(self, match, prefix, **opts):
+ def cat(self, match, fm, fntemplate, prefix, **opts):
rev = self._state[1]
if match.anypats():
return 1 #No support for include/exclude yet
@@ -1840,9 +1841,10 @@
if not match.files():
return 1
+ # TODO: add support for non-plain formatter (see cmdutil.cat())
for f in match.files():
output = self._gitcommand(["show", "%s:%s" % (rev, f)])
- fp = cmdutil.makefileobj(self._subparent, opts.get('output'),
+ fp = cmdutil.makefileobj(self._subparent, fntemplate,
self._ctx.node(),
pathname=self.wvfs.reljoin(prefix, f))
fp.write(output)
@@ -1878,9 +1880,9 @@
deleted, unknown, ignored, clean = [], [], [], []
command = ['status', '--porcelain', '-z']
- if opts.get('unknown'):
+ if opts.get(r'unknown'):
command += ['--untracked-files=all']
- if opts.get('ignored'):
+ if opts.get(r'ignored'):
command += ['--ignored']
out = self._gitcommand(command)
@@ -1908,7 +1910,7 @@
elif st == '!!':
ignored.append(filename1)
- if opts.get('clean'):
+ if opts.get(r'clean'):
out = self._gitcommand(['ls-files'])
for f in out.split('\n'):
if not f in changedfiles:
@@ -1921,7 +1923,7 @@
def diff(self, ui, diffopts, node2, match, prefix, **opts):
node1 = self._state[1]
cmd = ['diff', '--no-renames']
- if opts['stat']:
+ if opts[r'stat']:
cmd.append('--stat')
else:
# for Git, this also implies '-p'
@@ -1964,7 +1966,7 @@
@annotatesubrepoerror
def revert(self, substate, *pats, **opts):
self.ui.status(_('reverting subrepo %s\n') % substate[0])
- if not opts.get('no_backup'):
+ if not opts.get(r'no_backup'):
status = self.status(None)
names = status.modified
for name in names:
@@ -1973,7 +1975,7 @@
(name, bakname))
self.wvfs.rename(name, bakname)
- if not opts.get('dry_run'):
+ if not opts.get(r'dry_run'):
self.get(substate, overwrite=True)
return []
--- a/mercurial/templatefilters.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templatefilters.py Tue Jun 20 16:33:46 2017 -0400
@@ -1,4 +1,4 @@
-# template-filters.py - common template expansion filters
+# templatefilters.py - common template expansion filters
#
# Copyright 2005-2008 Matt Mackall <mpm@selenic.com>
#
@@ -16,6 +16,7 @@
encoding,
hbisect,
node,
+ pycompat,
registrar,
templatekw,
util,
@@ -24,6 +25,9 @@
urlerr = util.urlerr
urlreq = util.urlreq
+if pycompat.ispy3:
+ long = int
+
# filters are callables like:
# fn(obj)
# with:
@@ -226,15 +230,16 @@
elif obj is True:
return 'true'
elif isinstance(obj, (int, long, float)):
- return str(obj)
- elif isinstance(obj, str):
+ return pycompat.bytestr(obj)
+ elif isinstance(obj, bytes):
return '"%s"' % encoding.jsonescape(obj, paranoid=paranoid)
elif util.safehasattr(obj, 'keys'):
- out = ['%s: %s' % (json(k), json(v))
+ out = ['"%s": %s' % (encoding.jsonescape(k, paranoid=paranoid),
+ json(v, paranoid))
for k, v in sorted(obj.iteritems())]
return '{' + ', '.join(out) + '}'
elif util.safehasattr(obj, '__iter__'):
- out = [json(i) for i in obj]
+ out = [json(i, paranoid) for i in obj]
return '[' + ', '.join(out) + ']'
else:
raise TypeError('cannot encode type %s' % obj.__class__.__name__)
@@ -351,11 +356,11 @@
text and concatenating them.
"""
thing = templatekw.unwraphybrid(thing)
- if util.safehasattr(thing, '__iter__') and not isinstance(thing, str):
+ if util.safehasattr(thing, '__iter__') and not isinstance(thing, bytes):
return "".join([stringify(t) for t in thing if t is not None])
if thing is None:
return ""
- return str(thing)
+ return pycompat.bytestr(thing)
@templatefilter('stripdir')
def stripdir(text):
--- a/mercurial/templatekw.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templatekw.py Tue Jun 20 16:33:46 2017 -0400
@@ -8,11 +8,17 @@
from __future__ import absolute_import
from .i18n import _
-from .node import hex, nullid
+from .node import (
+ hex,
+ nullid,
+ short,
+)
+
from . import (
encoding,
error,
hbisect,
+ obsutil,
patch,
registrar,
scmutil,
@@ -158,10 +164,10 @@
template provided by cmdutil.changeset_templater"""
repo = ctx.repo()
if repo.ui.debugflag:
- hexnode = ctx.hex()
+ hexfunc = hex
else:
- hexnode = ctx.hex()[:12]
- return '%d:%s' % (scmutil.intrev(ctx.rev()), hexnode)
+ hexfunc = short
+ return '%d:%s' % (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
def getfiles(repo, ctx, revcache):
if 'files' not in revcache:
@@ -555,6 +561,17 @@
return 'obsolete'
return ''
+@templatekeyword("predecessors")
+def showpredecessors(repo, ctx, **args):
+ """Returns the list if the closest visible successors
+ """
+ predecessors = sorted(obsutil.closestpredecessors(repo, ctx.node()))
+ predecessors = map(hex, predecessors)
+
+ return _hybrid(None, predecessors,
+ lambda x: {'ctx': repo[x], 'revcache': {}},
+ lambda d: _formatrevnode(d['ctx']))
+
@templatekeyword('p1rev')
def showp1rev(repo, ctx, templ, **args):
"""Integer. The repository-local revision number of the changeset's
@@ -611,7 +628,7 @@
@templatekeyword('rev')
def showrev(repo, ctx, templ, **args):
"""Integer. The repository-local changeset revision number."""
- return scmutil.intrev(ctx.rev())
+ return scmutil.intrev(ctx)
def showrevslist(name, revs, **args):
"""helper to generate a list of revisions in which a mapped template will
--- a/mercurial/templater.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templater.py Tue Jun 20 16:33:46 2017 -0400
@@ -53,6 +53,7 @@
"""Parse a template expression into a stream of tokens, which must end
with term if specified"""
pos = start
+ program = pycompat.bytestr(program)
while pos < end:
c = program[pos]
if c.isspace(): # skip inter-token whitespace
@@ -959,6 +960,9 @@
return True
except error.RevlogError:
return False
+ except error.WdirUnsupported:
+ # single 'ff...' match
+ return True
shortest = node
startlength = max(6, minlength)
@@ -1294,6 +1298,10 @@
(self.map[t][1], inst.args[1]))
return self.cache[t]
+ def render(self, mapping):
+ """Render the default unnamed template and return result as string"""
+ return stringify(self('', **mapping))
+
def __call__(self, t, **mapping):
ttype = t in self.map and self.map[t][0] or 'default'
if ttype not in self.ecache:
--- a/mercurial/templates/gitweb/bookmarks.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/bookmarks.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -22,7 +23,8 @@
<a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a> |
<a href="{url|urlescape}file{sessionvars%urlparameter}">files</a> |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title"> </div>
--- a/mercurial/templates/gitweb/branches.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/branches.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -22,7 +23,8 @@
branches |
<a href="{url|urlescape}file{sessionvars%urlparameter}">files</a> |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title"> </div>
--- a/mercurial/templates/gitweb/changelog.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/changelog.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -12,14 +12,8 @@
<a href="/">Mercurial</a> {pathdef%breadcrumb} / changelog
</div>
-<form action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<div class="search">
-<input type="text" name="rev" />
-</div>
-</form>
-
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog/{symrev}{sessionvars%urlparameter}">shortlog</a> |
changelog |
@@ -32,6 +26,8 @@
<br/>
{changenav%nav}<br/>
</div>
+{searchform}
+</div>
{entries%changelogentry}
--- a/mercurial/templates/gitweb/changeset.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/changeset.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog/{symrev}{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log/{symrev}{sessionvars%urlparameter}">changelog</a> |
@@ -24,7 +25,8 @@
changeset |
<a href="{url|urlescape}raw-rev/{symrev}">raw</a> {archives%archiveentry} |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div>
--- a/mercurial/templates/gitweb/error.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/error.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -21,7 +22,8 @@
<a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a> |
<a href="{url|urlescape}file{sessionvars%urlparameter}">files</a> |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="page_body">
--- a/mercurial/templates/gitweb/fileannotate.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/fileannotate.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -30,7 +31,8 @@
<a href="{url|urlescape}comparison/{symrev}/{file|urlescape}{sessionvars%urlparameter}">comparison</a> |
<a href="{url|urlescape}raw-file/{symrev}/{file|urlescape}">raw</a> |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title">{file|escape}</div>
--- a/mercurial/templates/gitweb/filecomparison.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/filecomparison.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -30,7 +31,8 @@
comparison |
<a href="{url|urlescape}raw-diff/{symrev}/{file|urlescape}">raw</a> |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title">{file|escape}</div>
--- a/mercurial/templates/gitweb/filediff.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/filediff.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -30,7 +31,8 @@
<a href="{url|urlescape}comparison/{symrev}/{file|urlescape}{sessionvars%urlparameter}">comparison</a> |
<a href="{url|urlescape}raw-diff/{symrev}/{file|urlescape}">raw</a> |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title">{file|escape}</div>
--- a/mercurial/templates/gitweb/filelog.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/filelog.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -30,6 +31,8 @@
<br/>
{nav%filenav}
</div>
+{searchform}
+</div>
<div class="title" >
{file|urlescape}{if(linerange,
--- a/mercurial/templates/gitweb/filerevision.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/filerevision.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -30,7 +31,8 @@
<a href="{url|urlescape}comparison/{symrev}/{file|urlescape}{sessionvars%urlparameter}">comparison</a> |
<a href="{url|urlescape}raw-file/{symrev}/{file|urlescape}">raw</a> |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title">{file|escape}</div>
--- a/mercurial/templates/gitweb/graph.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/graph.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -13,13 +13,8 @@
<a href="/">Mercurial</a> {pathdef%breadcrumb} / graph
</div>
-<form action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<div class="search">
-<input type="text" name="rev" />
-</div>
-</form>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog/{symrev}{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log/{symrev}{sessionvars%urlparameter}">changelog</a> |
@@ -32,7 +27,9 @@
<br/>
<a href="{url|urlescape}graph/{symrev}{lessvars%urlparameter}">less</a>
<a href="{url|urlescape}graph/{symrev}{morevars%urlparameter}">more</a>
-| {changenav%navgraph}<br/>
+| {changenav%navgraph}
+</div>
+{searchform}
</div>
<div class="title"> </div>
--- a/mercurial/templates/gitweb/help.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/help.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -22,7 +23,8 @@
<a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a> |
<a href="{url|urlescape}file{sessionvars%urlparameter}">files</a> |
help
-<br/>
+</div>
+{searchform}
</div>
<div class="title"> </div>
--- a/mercurial/templates/gitweb/helptopics.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/helptopics.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -25,7 +26,8 @@
'<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>',
'help'
)}
-<br/>
+</div>
+{searchform}
</div>
<div class="title"> </div>
--- a/mercurial/templates/gitweb/manifest.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/manifest.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -23,7 +24,8 @@
files |
<a href="{url|urlescape}rev/{symrev}{sessionvars%urlparameter}">changeset</a> {archives%archiveentry} |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title">{path|escape} <span class="logtags">{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}</span></div>
--- a/mercurial/templates/gitweb/map Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/map Tue Jun 20 16:33:46 2017 -0400
@@ -323,3 +323,14 @@
urlparameter = '{separator}{name}={value|urlescape}'
hiddenformentry = '<input type="hidden" name="{name}" value="{value|escape}" />'
breadcrumb = '> <a href="{url|urlescape}">{name|escape}</a> '
+
+searchform = '
+ <div class="search">
+ <form id="searchform" action="{url|urlescape}log">
+ {sessionvars%hiddenformentry}
+ <input name="rev" type="text" value="{query|escape}" size="40" />
+ <div id="hint">{searchhint}</div>
+ </form>
+ </div>'
+searchhint = 'Find changesets by keywords (author, files, the commit message), revision
+ number or hash, or <a href="{url|urlescape}help/revsets">revset expression</a>.'
--- a/mercurial/templates/gitweb/search.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/search.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -10,16 +10,10 @@
<div class="page_header">
<a href="{logourl}" title="Mercurial" style="float: right;">Mercurial</a>
<a href="/">Mercurial</a> {pathdef%breadcrumb} / search
-
-<form action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<div class="search">
-<input type="text" name="rev" value="{query|escape}" />
-</div>
-</form>
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -30,7 +24,8 @@
<a href="{url|urlescape}file{sessionvars%urlparameter}">files</a>{archives%archiveentry}
|
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title">searching for {query|escape}</div>
--- a/mercurial/templates/gitweb/shortlog.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/shortlog.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -12,13 +12,8 @@
<a href="/">Mercurial</a> {pathdef%breadcrumb} / shortlog
</div>
-<form action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<div class="search">
-<input type="text" name="rev" />
-</div>
-</form>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
shortlog |
<a href="{url|urlescape}log/{symrev}{sessionvars%urlparameter}">changelog</a> |
@@ -30,6 +25,8 @@
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
<br/>{changenav%navshort}<br/>
</div>
+{searchform}
+</div>
<div class="title"> </div>
<table class="shortlogtable" cellspacing="0">
--- a/mercurial/templates/gitweb/summary.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/summary.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -10,15 +10,10 @@
<div class="page_header">
<a href="{logourl}" title="Mercurial" style="float: right;">Mercurial</a>
<a href="/">Mercurial</a> {pathdef%breadcrumb} / summary
-<form action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<div class="search">
-<input type="text" name="rev" />
-</div>
-</form>
</div>
<div class="page_nav">
+<div>
summary |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -28,7 +23,8 @@
<a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a> |
<a href="{url|urlescape}file{sessionvars%urlparameter}">files</a>{archives%archiveentry} |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title"> </div>
--- a/mercurial/templates/gitweb/tags.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/gitweb/tags.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -22,7 +23,8 @@
<a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a> |
<a href="{url|urlescape}file{sessionvars%urlparameter}">files</a> |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title"> </div>
--- a/mercurial/templates/monoblue/bookmarks.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/bookmarks.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / bookmarks</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/branches.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/branches.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / branches</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/changelog.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/changelog.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / changelog</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/changeset.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/changeset.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / changeset</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/error.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/error.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / not found: {repo|escape}</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/fileannotate.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/fileannotate.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / annotate</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/filecomparison.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/filecomparison.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / file comparison</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/filediff.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/filediff.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / file diff</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/filelog.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/filelog.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / file revisions</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/filerevision.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/filerevision.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / file revision</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/graph.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/graph.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -10,13 +10,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / graph</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/help.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/help.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / help</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/helptopics.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/helptopics.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / help</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/manifest.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/manifest.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / files</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/map Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/map Tue Jun 20 16:33:46 2017 -0400
@@ -279,3 +279,12 @@
hiddenformentry = '<input type="hidden" name="{name}" value="{value|escape}" />'
graph = graph.tmpl
breadcrumb = '> <a href="{url|urlescape}">{name|escape}</a> '
+
+searchform = '
+ <form action="{url|urlescape}log">
+ {sessionvars%hiddenformentry}
+ <dl class="search">
+ <dt><label>Search: </label></dt>
+ <dd><input type="text" name="rev" value="{query|escape}" /></dd>
+ </dl>
+ </form>'
--- a/mercurial/templates/monoblue/notfound.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/notfound.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / not found: {repo|escape}</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/search.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/search.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / search</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" value="{query|escape}" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/shortlog.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/shortlog.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / shortlog</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/summary.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/summary.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / summary</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li class="current">summary</li>
--- a/mercurial/templates/monoblue/tags.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/monoblue/tags.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / tags</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/paper/bookmarks.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/paper/bookmarks.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -34,11 +34,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>bookmarks</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<table class="bigtable">
<thead>
--- a/mercurial/templates/paper/branches.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/paper/branches.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -34,11 +34,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>branches</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<table class="bigtable">
<thead>
--- a/mercurial/templates/paper/changeset.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/paper/changeset.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -36,11 +36,7 @@
{changesetbranch%changelogbranchname}{changesettag}{changesetbookmark}
</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="description">{desc|strip|escape|websub|nonempty}</div>
--- a/mercurial/templates/paper/error.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/paper/error.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -26,11 +26,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>error</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30"></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="description">
<p>
--- a/mercurial/templates/paper/fileannotate.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/paper/fileannotate.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -42,11 +42,7 @@
{branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}
</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="description">{desc|strip|escape|websub|nonempty}</div>
--- a/mercurial/templates/paper/filecomparison.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/paper/filecomparison.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -41,11 +41,7 @@
{branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}
</h3>
-<form class="search" action="{url|urlescape}log">
-<p>{sessionvars%hiddenformentry}</p>
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="description">{desc|strip|escape|websub|nonempty}</div>
--- a/mercurial/templates/paper/filediff.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/paper/filediff.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -41,11 +41,7 @@
{branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}
</h3>
-<form class="search" action="{url|urlescape}log">
-<p>{sessionvars%hiddenformentry}</p>
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="description">{desc|strip|escape|websub|nonempty}</div>
--- a/mercurial/templates/paper/filelog.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/paper/filelog.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -51,11 +51,7 @@
' (following lines {linerange}{if(descend, ', descending')} <a href="{url|urlescape}log/{symrev}/{file|urlescape}{sessionvars%urlparameter}">back to filelog</a>)')}
</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="navigate">
<a href="{url|urlescape}log/{symrev}/{file|urlescape}{lessvars%urlparameter}">less</a>
--- a/mercurial/templates/paper/filerevision.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/paper/filerevision.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -41,11 +41,7 @@
{branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}
</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="description">{desc|strip|escape|websub|nonempty}</div>
--- a/mercurial/templates/paper/graph.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/paper/graph.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -39,11 +39,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>graph</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="navigate">
<a href="{url|urlescape}graph/{symrev}{lessvars%urlparameter}">less</a>
--- a/mercurial/templates/paper/help.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/paper/help.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -24,12 +24,7 @@
<div class="main">
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>Help: {topic}</h3>
-
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div id="doc">
{rstdoc(doc, "html")}
</div>
--- a/mercurial/templates/paper/helptopics.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/paper/helptopics.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -26,11 +26,7 @@
<div class="main">
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<table class="bigtable">
<tr><td colspan="2"><h2><a name="topics" href="#topics">Topics</a></h2></td></tr>
{topics % helpentry}
--- a/mercurial/templates/paper/manifest.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/paper/manifest.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -35,11 +35,7 @@
{branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}
</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<table class="bigtable">
<thead>
--- a/mercurial/templates/paper/map Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/paper/map Tue Jun 20 16:33:46 2017 -0400
@@ -243,5 +243,11 @@
hiddenformentry = '<input type="hidden" name="{name}" value="{value|escape}" />'
breadcrumb = '> <a href="{url|urlescape}">{name|escape}</a> '
+searchform = '
+ <form class="search" action="{url|urlescape}log">
+ {sessionvars%hiddenformentry}
+ <p><input name="rev" id="search1" type="text" size="30" value="{query|escape}" /></p>
+ <div id="hint">{searchhint}</div>
+ </form>'
searchhint = 'Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="{url|urlescape}help/revsets">revset expression</a>.'
--- a/mercurial/templates/paper/search.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/paper/search.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -33,11 +33,7 @@
Use {showunforcekw}</a> instead.')}
</p>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" value="{query|escape}"></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="navigate">
<a href="{url|urlescape}log{lessvars%urlparameter}">less</a>
--- a/mercurial/templates/paper/shortlog.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/paper/shortlog.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -41,11 +41,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>log</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" value="{query|escape}" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="navigate">
<a href="{url|urlescape}shortlog/{symrev}{lessvars%urlparameter}">less</a>
--- a/mercurial/templates/paper/tags.tmpl Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/paper/tags.tmpl Tue Jun 20 16:33:46 2017 -0400
@@ -34,11 +34,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>tags</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<table class="bigtable">
<thead>
--- a/mercurial/templates/static/style-gitweb.css Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/templates/static/style-gitweb.css Tue Jun 20 16:33:46 2017 -0400
@@ -4,7 +4,12 @@
div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; }
div.page_header a:visited { color:#0000cc; }
div.page_header a:hover { color:#880000; }
-div.page_nav { padding:8px; }
+div.page_nav {
+ padding:8px;
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+}
div.page_nav a:visited { color:#0000cc; }
div.page_path { padding:8px; border:solid #d9d8d1; border-width:0px 0px 1px}
div.page_footer { padding:4px 8px; background-color: #d9d8d1; }
@@ -52,7 +57,23 @@
div.pre { font-family:monospace; font-size:12px; white-space:pre; }
div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; }
div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; }
-div.search { margin:4px 8px; position:absolute; top:56px; right:12px }
+
+.search {
+ margin-right: 8px;
+}
+
+div#hint {
+ position: absolute;
+ display: none;
+ width: 250px;
+ padding: 5px;
+ background: #ffc;
+ border: 1px solid yellow;
+ border-radius: 5px;
+}
+
+#searchform:hover div#hint { display: block; }
+
tr.thisrev a { color:#999999; text-decoration: none; }
tr.thisrev pre { color:#009900; }
td.annotate {
--- a/mercurial/transaction.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/transaction.py Tue Jun 20 16:33:46 2017 -0400
@@ -26,10 +26,10 @@
# These are the file generators that should only be executed after the
# finalizers are done, since they rely on the output of the finalizers (like
# the changelog having been written).
-postfinalizegenerators = set([
+postfinalizegenerators = {
'bookmarks',
'dirstate'
-])
+}
gengroupall='all'
gengroupprefinalize='prefinalize'
@@ -137,6 +137,10 @@
releasefn = lambda tr, success: None
self.releasefn = releasefn
+ # A dict dedicated to precisely tracking the changes introduced in the
+ # transaction.
+ self.changes = {}
+
# a dict of arguments to be passed to hooks
self.hookargs = {}
self.file = opener.open(self.journal, "w")
@@ -427,6 +431,7 @@
'''commit the transaction'''
if self.count == 1:
self.validator(self) # will raise exception if needed
+ self.validator = None # Help prevent cycles.
self._generatefiles(group=gengroupprefinalize)
categories = sorted(self._finalizecallback)
for cat in categories:
@@ -460,6 +465,7 @@
self._writeundo()
if self.after:
self.after()
+ self.after = None # Help prevent cycles.
if self.opener.isfile(self._backupjournal):
self.opener.unlink(self._backupjournal)
if self.opener.isfile(self.journal):
@@ -483,6 +489,7 @@
self.journal = None
self.releasefn(self, True) # notify success of closing transaction
+ self.releasefn = None # Help prevent cycles.
# run post close action
categories = sorted(self._postclosecallback)
@@ -553,6 +560,7 @@
finally:
self.journal = None
self.releasefn(self, False) # notify failure of transaction
+ self.releasefn = None # Help prevent cycles.
def rollback(opener, vfsmap, file, report):
"""Rolls back the transaction contained in the given file
--- a/mercurial/ui.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/ui.py Tue Jun 20 16:33:46 2017 -0400
@@ -43,6 +43,20 @@
_keepalnum = ''.join(c for c in map(pycompat.bytechr, range(256))
if not c.isalnum())
+# The config knobs that will be altered (if unset) by ui.tweakdefaults.
+tweakrc = """
+[ui]
+# The rollback command is dangerous. As a rule, don't use it.
+rollback = False
+
+[commands]
+# Make `hg status` emit cwd-relative paths by default.
+status.relative = yes
+
+[diff]
+git = 1
+"""
+
samplehgrcs = {
'user':
"""# example user config (see 'hg help config' for more info)
@@ -182,6 +196,7 @@
self.fin = src.fin
self.pageractive = src.pageractive
self._disablepager = src._disablepager
+ self._tweaked = src._tweaked
self._tcfg = src._tcfg.copy()
self._ucfg = src._ucfg.copy()
@@ -205,6 +220,7 @@
self.fin = util.stdin
self.pageractive = False
self._disablepager = False
+ self._tweaked = False
# shared read-only environment
self.environ = encoding.environ
@@ -241,8 +257,29 @@
u.fixconfig(section=section)
else:
raise error.ProgrammingError('unknown rctype: %s' % t)
+ u._maybetweakdefaults()
return u
+ def _maybetweakdefaults(self):
+ if not self.configbool('ui', 'tweakdefaults'):
+ return
+ if self._tweaked or self.plain('tweakdefaults'):
+ return
+
+ # Note: it is SUPER IMPORTANT that you set self._tweaked to
+ # True *before* any calls to setconfig(), otherwise you'll get
+ # infinite recursion between setconfig and this method.
+ #
+ # TODO: We should extract an inner method in setconfig() to
+ # avoid this weirdness.
+ self._tweaked = True
+ tmpcfg = config.config()
+ tmpcfg.parse('<tweakdefaults>', tweakrc)
+ for section in tmpcfg:
+ for name, value in tmpcfg.items(section):
+ if not self.hasconfig(section, name):
+ self.setconfig(section, name, value, "<tweakdefaults>")
+
def copy(self):
return self.__class__(self)
@@ -263,7 +300,7 @@
(util.timer() - starttime) * 1000
def formatter(self, topic, opts):
- return formatter.formatter(self, topic, opts)
+ return formatter.formatter(self, self, topic, opts)
def _trusted(self, fp, f):
st = util.fstat(fp)
@@ -387,6 +424,7 @@
for cfg in (self._ocfg, self._tcfg, self._ucfg):
cfg.set(section, name, value, source)
self.fixconfig(section=section)
+ self._maybetweakdefaults()
def _data(self, untrusted):
return untrusted and self._ucfg or self._tcfg
@@ -522,7 +560,7 @@
return default
try:
return convert(v)
- except ValueError:
+ except (ValueError, error.ParseError):
if desc is None:
desc = convert.__name__
raise error.ConfigError(_("%s.%s is not a valid %s ('%s')")
@@ -598,6 +636,19 @@
return self.configwith(config.parselist, section, name, default or [],
'list', untrusted)
+ def configdate(self, section, name, default=None, untrusted=False):
+ """parse a configuration element as a tuple of ints
+
+ >>> u = ui(); s = 'foo'
+ >>> u.setconfig(s, 'date', '0 0')
+ >>> u.configdate(s, 'date')
+ (0, 0)
+ """
+ if self.config(section, name, default, untrusted):
+ return self.configwith(util.parsedate, section, name, default,
+ 'date', untrusted)
+ return default
+
def hasconfig(self, section, name, untrusted=False):
return self._data(untrusted).hasitem(section, name)
--- a/mercurial/upgrade.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/upgrade.py Tue Jun 20 16:33:46 2017 -0400
@@ -28,12 +28,12 @@
An upgrade will not be allowed if the repository doesn't have the
requirements returned by this function.
"""
- return set([
+ return {
# Introduced in Mercurial 0.9.2.
'revlogv1',
# Introduced in Mercurial 0.9.2.
'store',
- ])
+ }
def blocksourcerequirements(repo):
"""Obtain requirements that will prevent an upgrade from occurring.
@@ -41,7 +41,7 @@
An upgrade cannot be performed if the source repository contains a
requirements in the returned set.
"""
- return set([
+ return {
# The upgrade code does not yet support these experimental features.
# This is an artificial limitation.
'manifestv2',
@@ -51,7 +51,7 @@
'parentdelta',
# Upgrade should operate on the actual store, not the shared link.
'shared',
- ])
+ }
def supportremovedrequirements(repo):
"""Obtain requirements that can be removed during an upgrade.
@@ -70,13 +70,13 @@
Extensions should monkeypatch this to add their custom requirements.
"""
- return set([
+ return {
'dotencode',
'fncache',
'generaldelta',
'revlogv1',
'store',
- ])
+ }
def allowednewrequirements(repo):
"""Obtain requirements that can be added to a repository during upgrade.
@@ -88,11 +88,11 @@
bad additions because the whitelist approach is safer and will prevent
future, unknown requirements from accidentally being added.
"""
- return set([
+ return {
'dotencode',
'fncache',
'generaldelta',
- ])
+ }
deficiency = 'deficiency'
optimisation = 'optimization'
@@ -628,7 +628,7 @@
ui.write(_('marking source repository as being upgraded; clients will be '
'unable to read from repository\n'))
scmutil.writerequires(srcrepo.vfs,
- srcrepo.requirements | set(['upgradeinprogress']))
+ srcrepo.requirements | {'upgradeinprogress'})
ui.write(_('starting in-place swap of repository data\n'))
ui.write(_('replaced files will be backed up at %s\n') %
--- a/mercurial/util.h Tue Jun 13 22:24:41 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,45 +0,0 @@
-/*
- util.h - utility functions for interfacing with the various python APIs.
-
- This software may be used and distributed according to the terms of
- the GNU General Public License, incorporated herein by reference.
-*/
-
-#ifndef _HG_UTIL_H_
-#define _HG_UTIL_H_
-
-#include "compat.h"
-
-#if PY_MAJOR_VERSION >= 3
-#define IS_PY3K
-#endif
-
-typedef struct {
- PyObject_HEAD
- char state;
- int mode;
- int size;
- int mtime;
-} dirstateTupleObject;
-
-extern PyTypeObject dirstateTupleType;
-#define dirstate_tuple_check(op) (Py_TYPE(op) == &dirstateTupleType)
-
-/* This should be kept in sync with normcasespecs in encoding.py. */
-enum normcase_spec {
- NORMCASE_LOWER = -1,
- NORMCASE_UPPER = 1,
- NORMCASE_OTHER = 0
-};
-
-#define MIN(a, b) (((a)<(b))?(a):(b))
-/* VC9 doesn't include bool and lacks stdbool.h based on my searching */
-#if defined(_MSC_VER) || __STDC_VERSION__ < 199901L
-#define true 1
-#define false 0
-typedef unsigned char bool;
-#else
-#include <stdbool.h>
-#endif
-
-#endif /* _HG_UTIL_H_ */
--- a/mercurial/util.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/util.py Tue Jun 20 16:33:46 2017 -0400
@@ -45,11 +45,17 @@
encoding,
error,
i18n,
- osutil,
- parsers,
+ policy,
pycompat,
)
+base85 = policy.importmod(r'base85')
+osutil = policy.importmod(r'osutil')
+parsers = policy.importmod(r'parsers')
+
+b85decode = base85.b85decode
+b85encode = base85.b85encode
+
cookielib = pycompat.cookielib
empty = pycompat.empty
httplib = pycompat.httplib
@@ -105,6 +111,7 @@
hidewindow = platform.hidewindow
isexec = platform.isexec
isowner = platform.isowner
+listdir = osutil.listdir
localpath = platform.localpath
lookupreg = platform.lookupreg
makedir = platform.makedir
@@ -142,6 +149,15 @@
unlink = platform.unlink
username = platform.username
+try:
+ recvfds = osutil.recvfds
+except AttributeError:
+ pass
+try:
+ setprocname = osutil.setprocname
+except AttributeError:
+ pass
+
# Python compatibility
_notset = object()
@@ -556,54 +572,22 @@
return f
-class sortdict(dict):
- '''a simple sorted dictionary'''
- def __init__(self, data=None):
- self._list = []
- if data:
- self.update(data)
- def copy(self):
- return sortdict(self)
- def __setitem__(self, key, val):
+class sortdict(collections.OrderedDict):
+ '''a simple sorted dictionary
+
+ >>> d1 = sortdict([('a', 0), ('b', 1)])
+ >>> d2 = d1.copy()
+ >>> d2
+ sortdict([('a', 0), ('b', 1)])
+ >>> d2.update([('a', 2)])
+ >>> d2.keys() # should still be in last-set order
+ ['b', 'a']
+ '''
+
+ def __setitem__(self, key, value):
if key in self:
- self._list.remove(key)
- self._list.append(key)
- dict.__setitem__(self, key, val)
- def __iter__(self):
- return self._list.__iter__()
- def update(self, src):
- if isinstance(src, dict):
- src = src.iteritems()
- for k, v in src:
- self[k] = v
- def clear(self):
- dict.clear(self)
- self._list = []
- def items(self):
- return [(k, self[k]) for k in self._list]
- def __delitem__(self, key):
- dict.__delitem__(self, key)
- self._list.remove(key)
- def pop(self, key, *args, **kwargs):
- try:
- self._list.remove(key)
- except ValueError:
- pass
- return dict.pop(self, key, *args, **kwargs)
- def keys(self):
- return self._list[:]
- def iterkeys(self):
- return self._list.__iter__()
- def iteritems(self):
- for k in self._list:
- yield k, self[k]
- def insert(self, index, key, val):
- self._list.insert(index, key)
- dict.__setitem__(self, key, val)
- def __repr__(self):
- if not self:
- return '%s()' % self.__class__.__name__
- return '%s(%r)' % (self.__class__.__name__, self.items())
+ del self[key]
+ super(sortdict, self).__setitem__(key, value)
class _lrucachenode(object):
"""A node in a doubly linked list.
@@ -1049,28 +1033,20 @@
except Exception:
pass
cmd = quotecommand(cmd)
- if pycompat.sysplatform == 'plan9' and (sys.version_info[0] == 2
- and sys.version_info[1] < 7):
- # subprocess kludge to work around issues in half-baked Python
- # ports, notably bichued/python:
- if not cwd is None:
- os.chdir(cwd)
- rc = os.system(cmd)
+ env = shellenviron(environ)
+ if out is None or _isstdout(out):
+ rc = subprocess.call(cmd, shell=True, close_fds=closefds,
+ env=env, cwd=cwd)
else:
- env = shellenviron(environ)
- if out is None or _isstdout(out):
- rc = subprocess.call(cmd, shell=True, close_fds=closefds,
- env=env, cwd=cwd)
- else:
- proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
- env=env, cwd=cwd, stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- for line in iter(proc.stdout.readline, ''):
- out.write(line)
- proc.wait()
- rc = proc.returncode
- if pycompat.sysplatform == 'OpenVMS' and rc & 1:
- rc = 0
+ proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
+ env=env, cwd=cwd, stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ for line in iter(proc.stdout.readline, ''):
+ out.write(line)
+ proc.wait()
+ rc = proc.returncode
+ if pycompat.sysplatform == 'OpenVMS' and rc & 1:
+ rc = 0
return rc
def checksignature(func):
@@ -1086,7 +1062,7 @@
return check
# a whilelist of known filesystems where hardlink works reliably
-_hardlinkfswhitelist = set([
+_hardlinkfswhitelist = {
'btrfs',
'ext2',
'ext3',
@@ -1098,7 +1074,7 @@
'ufs',
'xfs',
'zfs',
-])
+}
def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
'''copy a file, preserving mode and optionally other stat info like
@@ -1114,7 +1090,7 @@
oldstat = None
if os.path.lexists(dest):
if checkambig:
- oldstat = checkambig and filestat(dest)
+ oldstat = checkambig and filestat.frompath(dest)
unlink(dest)
if hardlink:
# Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
@@ -1144,7 +1120,7 @@
else:
shutil.copymode(src, dest)
if oldstat and oldstat.stat:
- newstat = filestat(dest)
+ newstat = filestat.frompath(dest)
if newstat.isambig(oldstat):
# stat of copied file is ambiguous to original one
advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
@@ -1164,7 +1140,7 @@
os.stat(os.path.dirname(dst)).st_dev)
topic = gettopic()
os.mkdir(dst)
- for name, kind in osutil.listdir(src):
+ for name, kind in listdir(src):
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
def nprog(t, pos):
@@ -1522,13 +1498,23 @@
exists. Otherwise, it is None. This can avoid preparative
'exists()' examination on client side of this class.
"""
- def __init__(self, path):
+ def __init__(self, stat):
+ self.stat = stat
+
+ @classmethod
+ def frompath(cls, path):
try:
- self.stat = os.stat(path)
+ stat = os.stat(path)
except OSError as err:
if err.errno != errno.ENOENT:
raise
- self.stat = None
+ stat = None
+ return cls(stat)
+
+ @classmethod
+ def fromfp(cls, fp):
+ stat = os.fstat(fp.fileno())
+ return cls(stat)
__hash__ = object.__hash__
@@ -1541,6 +1527,10 @@
self.stat.st_ctime == old.stat.st_ctime and
self.stat.st_mtime == old.stat.st_mtime)
except AttributeError:
+ pass
+ try:
+ return self.stat is None and old.stat is None
+ except AttributeError:
return False
def isambig(self, old):
@@ -1584,7 +1574,10 @@
'old' should be previous filestat of 'path'.
This skips avoiding ambiguity, if a process doesn't have
- appropriate privileges for 'path'.
+ appropriate privileges for 'path'. This returns False in this
+ case.
+
+ Otherwise, this returns True, as "ambiguity is avoided".
"""
advanced = (old.stat.st_mtime + 1) & 0x7fffffff
try:
@@ -1593,8 +1586,9 @@
if inst.errno == errno.EPERM:
# utime() on the file created by another user causes EPERM,
# if a process doesn't have appropriate privileges
- return
+ return False
raise
+ return True
def __ne__(self, other):
return not self == other
@@ -1630,10 +1624,10 @@
if not self._fp.closed:
self._fp.close()
filename = localpath(self.__name)
- oldstat = self._checkambig and filestat(filename)
+ oldstat = self._checkambig and filestat.frompath(filename)
if oldstat and oldstat.stat:
rename(self._tempname, filename)
- newstat = filestat(filename)
+ newstat = filestat.frompath(filename)
if newstat.isambig(oldstat):
# stat of changed file is ambiguous to original one
advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
@@ -1727,8 +1721,7 @@
iterator over chunks of arbitrary size."""
def __init__(self, in_iter):
- """in_iter is the iterator that's iterating over the input chunks.
- targetsize is how big a buffer to try to maintain."""
+ """in_iter is the iterator that's iterating over the input chunks."""
def splitbig(chunks):
for chunk in chunks:
if len(chunk) > 2**20:
@@ -1917,6 +1910,7 @@
# add missing elements from defaults
usenow = False # default to using biased defaults
for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
+ part = pycompat.bytestr(part)
found = [True for p in part if ("%"+p) in format]
if not found:
date += "@" + defaults[part][usenow]
@@ -1926,7 +1920,8 @@
# elements are relative to today
usenow = True
- timetuple = time.strptime(date, format)
+ timetuple = time.strptime(encoding.strfromlocal(date),
+ encoding.strfromlocal(format))
localunixtime = int(calendar.timegm(timetuple))
if offset is None:
# local timezone
@@ -1984,13 +1979,13 @@
# this piece is for rounding the specific end of unknowns
b = bias.get(part)
if b is None:
- if part[0] in "HMS":
+ if part[0:1] in "HMS":
b = "00"
else:
b = "0"
# this piece is for matching the generic end to today's date
- n = datestr(now, "%" + part[0])
+ n = datestr(now, "%" + part[0:1])
defaults[part] = (b, n)
@@ -2002,15 +1997,15 @@
else:
break
else:
- raise Abort(_('invalid date: %r') % date)
+ raise error.ParseError(_('invalid date: %r') % date)
# validate explicit (probably user-specified) date and
# time zone offset. values must fit in signed 32 bits for
# current 32-bit linux runtimes. timezones go from UTC-12
# to UTC+14
if when < -0x80000000 or when > 0x7fffffff:
- raise Abort(_('date exceeds 32 bits: %d') % when)
+ raise error.ParseError(_('date exceeds 32 bits: %d') % when)
if offset < -50400 or offset > 43200:
- raise Abort(_('impossible time zone offset: %d') % offset)
+ raise error.ParseError(_('impossible time zone offset: %d') % offset)
return when, offset
def matchdate(date):
@@ -2328,7 +2323,7 @@
# First chunk on line is whitespace -- drop it, unless this
# is the very beginning of the text (i.e. no lines started yet).
- if self.drop_whitespace and chunks[-1].strip() == '' and lines:
+ if self.drop_whitespace and chunks[-1].strip() == r'' and lines:
del chunks[-1]
while chunks:
@@ -2350,13 +2345,13 @@
# If the last chunk on this line is all whitespace, drop it.
if (self.drop_whitespace and
- cur_line and cur_line[-1].strip() == ''):
+ cur_line and cur_line[-1].strip() == r''):
del cur_line[-1]
# Convert current line back to a string and store it in list
# of all lines (return value).
if cur_line:
- lines.append(indent + ''.join(cur_line))
+ lines.append(indent + r''.join(cur_line))
return lines
--- a/mercurial/verify.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/verify.py Tue Jun 20 16:33:46 2017 -0400
@@ -49,6 +49,8 @@
self.lrugetctx = util.lrucachefunc(repo.changectx)
self.refersmf = False
self.fncachewarned = False
+ # developer config: verify.skipflags
+ self.skipflags = repo.ui.configint('verify', 'skipflags')
def warn(self, msg):
self.ui.warn(msg + "\n")
@@ -427,13 +429,18 @@
# 2. hash check: depending on flag processor, we may need to
# use either "text" (external), or "rawtext" (in revlog).
try:
- l = len(fl.read(n))
- rp = fl.renamed(n)
- if l != fl.size(i):
- # the "L1 == L2" check
- if len(fl.revision(n, raw=True)) != fl.rawsize(i):
- self.err(lr, _("unpacked size is %s, %s expected") %
- (l, fl.size(i)), f)
+ skipflags = self.skipflags
+ if skipflags:
+ skipflags &= fl.flags(i)
+ if not skipflags:
+ fl.read(n) # side effect: read content and do checkhash
+ rp = fl.renamed(n)
+ # the "L1 == L2" check
+ l1 = fl.rawsize(i)
+ l2 = len(fl.revision(n, raw=True))
+ if l1 != l2:
+ self.err(lr, _("unpacked size is %s, %s expected") %
+ (l2, l1), f)
except error.CensoredNodeError:
# experimental config: censor.policy
if ui.config("censor", "policy", "abort") == "abort":
--- a/mercurial/vfs.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/vfs.py Tue Jun 20 16:33:46 2017 -0400
@@ -17,7 +17,6 @@
from .i18n import _
from . import (
error,
- osutil,
pathutil,
pycompat,
util,
@@ -163,7 +162,7 @@
return fd, fname
def readdir(self, path=None, stat=None, skip=None):
- return osutil.listdir(self.join(path), stat, skip)
+ return util.listdir(self.join(path), stat, skip)
def readlock(self, path):
return util.readlock(self.join(path))
@@ -175,16 +174,24 @@
only if destination file is guarded by any lock
(e.g. repo.lock or repo.wlock).
"""
+ srcpath = self.join(src)
dstpath = self.join(dst)
- oldstat = checkambig and util.filestat(dstpath)
+ oldstat = checkambig and util.filestat.frompath(dstpath)
if oldstat and oldstat.stat:
- ret = util.rename(self.join(src), dstpath)
- newstat = util.filestat(dstpath)
- if newstat.isambig(oldstat):
- # stat of renamed file is ambiguous to original one
- newstat.avoidambig(dstpath, oldstat)
+ def dorename(spath, dpath):
+ ret = util.rename(spath, dpath)
+ newstat = util.filestat.frompath(dpath)
+ if newstat.isambig(oldstat):
+ # stat of renamed file is ambiguous to original one
+ return ret, newstat.avoidambig(dpath, oldstat)
+ return ret, True
+ ret, avoided = dorename(srcpath, dstpath)
+ if not avoided:
+ # simply copy to change owner of srcpath (see issue5418)
+ util.copyfile(dstpath, srcpath)
+ ret, avoided = dorename(srcpath, dstpath)
return ret
- return util.rename(self.join(src), dstpath)
+ return util.rename(srcpath, dstpath)
def readlink(self, path):
return os.readlink(self.join(path))
@@ -618,12 +625,12 @@
"""
def __init__(self, fh):
super(checkambigatclosing, self).__init__(fh)
- object.__setattr__(self, r'_oldstat', util.filestat(fh.name))
+ object.__setattr__(self, r'_oldstat', util.filestat.frompath(fh.name))
def _checkambig(self):
oldstat = self._oldstat
if oldstat.stat:
- newstat = util.filestat(self._origfh.name)
+ newstat = util.filestat.frompath(self._origfh.name)
if newstat.isambig(oldstat):
# stat of changed file is ambiguous to original one
newstat.avoidambig(self._origfh.name, oldstat)
--- a/mercurial/win32.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/win32.py Tue Jun 20 16:33:46 2017 -0400
@@ -131,29 +131,9 @@
('srWindow', _SMALL_RECT),
('dwMaximumWindowSize', _COORD)]
+_STD_OUTPUT_HANDLE = _DWORD(-11).value
_STD_ERROR_HANDLE = _DWORD(-12).value
-# CreateToolhelp32Snapshot, Process32First, Process32Next
-_TH32CS_SNAPPROCESS = 0x00000002
-_MAX_PATH = 260
-
-class _tagPROCESSENTRY32(ctypes.Structure):
- _fields_ = [('dwsize', _DWORD),
- ('cntUsage', _DWORD),
- ('th32ProcessID', _DWORD),
- ('th32DefaultHeapID', ctypes.c_void_p),
- ('th32ModuleID', _DWORD),
- ('cntThreads', _DWORD),
- ('th32ParentProcessID', _DWORD),
- ('pcPriClassBase', _LONG),
- ('dwFlags', _DWORD),
- ('szExeFile', ctypes.c_char * _MAX_PATH)]
-
- def __init__(self):
- super(_tagPROCESSENTRY32, self).__init__()
- self.dwsize = ctypes.sizeof(self)
-
-
# types of parameters of C functions used (required by pypy)
_kernel32.CreateFileA.argtypes = [_LPCSTR, _DWORD, _DWORD, ctypes.c_void_p,
@@ -202,6 +182,12 @@
_kernel32.SetConsoleCtrlHandler.argtypes = [_SIGNAL_HANDLER, _BOOL]
_kernel32.SetConsoleCtrlHandler.restype = _BOOL
+_kernel32.SetConsoleMode.argtypes = [_HANDLE, _DWORD]
+_kernel32.SetConsoleMode.restype = _BOOL
+
+_kernel32.GetConsoleMode.argtypes = [_HANDLE, ctypes.c_void_p]
+_kernel32.GetConsoleMode.restype = _BOOL
+
_kernel32.GetStdHandle.argtypes = [_DWORD]
_kernel32.GetStdHandle.restype = _HANDLE
@@ -221,19 +207,10 @@
_user32.EnumWindows.argtypes = [_WNDENUMPROC, _LPARAM]
_user32.EnumWindows.restype = _BOOL
-_kernel32.CreateToolhelp32Snapshot.argtypes = [_DWORD, _DWORD]
-_kernel32.CreateToolhelp32Snapshot.restype = _BOOL
-
_kernel32.PeekNamedPipe.argtypes = [_HANDLE, ctypes.c_void_p, _DWORD,
ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
_kernel32.PeekNamedPipe.restype = _BOOL
-_kernel32.Process32First.argtypes = [_HANDLE, ctypes.c_void_p]
-_kernel32.Process32First.restype = _BOOL
-
-_kernel32.Process32Next.argtypes = [_HANDLE, ctypes.c_void_p]
-_kernel32.Process32Next.restype = _BOOL
-
def _raiseoserror(name):
err = ctypes.WinError()
raise OSError(err.errno, '%s: %s' % (name, err.strerror))
@@ -372,50 +349,28 @@
height = csbi.srWindow.Bottom - csbi.srWindow.Top + 1
return width, height
-def _1stchild(pid):
- '''return the 1st found child of the given pid
+def enablevtmode():
+ '''Enable virtual terminal mode for the associated console. Return True if
+ enabled, else False.'''
- None is returned when no child is found'''
- pe = _tagPROCESSENTRY32()
+ ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x4
+
+ handle = _kernel32.GetStdHandle(_STD_OUTPUT_HANDLE) # don't close the handle
+ if handle == _INVALID_HANDLE_VALUE:
+ return False
- # create handle to list all processes
- ph = _kernel32.CreateToolhelp32Snapshot(_TH32CS_SNAPPROCESS, 0)
- if ph == _INVALID_HANDLE_VALUE:
- raise ctypes.WinError()
- try:
- r = _kernel32.Process32First(ph, ctypes.byref(pe))
- # loop over all processes
- while r:
- if pe.th32ParentProcessID == pid:
- # return first child found
- return pe.th32ProcessID
- r = _kernel32.Process32Next(ph, ctypes.byref(pe))
- finally:
- _kernel32.CloseHandle(ph)
- if _kernel32.GetLastError() != _ERROR_NO_MORE_FILES:
- raise ctypes.WinError()
- return None # no child found
+ mode = _DWORD(0)
+
+ if not _kernel32.GetConsoleMode(handle, ctypes.byref(mode)):
+ return False
-class _tochildpid(int): # pid is _DWORD, which always matches in an int
- '''helper for spawndetached, returns the child pid on conversion to string
+ if (mode.value & ENABLE_VIRTUAL_TERMINAL_PROCESSING) == 0:
+ mode.value |= ENABLE_VIRTUAL_TERMINAL_PROCESSING
- Does not resolve the child pid immediately because the child may not yet be
- started.
- '''
- def childpid(self):
- '''returns the child pid of the first found child of the process
- with this pid'''
- return _1stchild(self)
- def __str__(self):
- # run when the pid is written to the file
- ppid = self.childpid()
- if ppid is None:
- # race, child has exited since check
- # fall back to this pid. Its process will also have disappeared,
- # raising the same error type later as when the child pid would
- # be returned.
- return " %d" % self
- return str(ppid)
+ if not _kernel32.SetConsoleMode(handle, mode):
+ return False
+
+ return True
def spawndetached(args):
# No standard library function really spawns a fully detached
@@ -436,10 +391,6 @@
env += '\0'
args = subprocess.list2cmdline(args)
- # Not running the command in shell mode makes Python 2.6 hang when
- # writing to hgweb output socket.
- comspec = encoding.environ.get("COMSPEC", "cmd.exe")
- args = comspec + " /c " + args
res = _kernel32.CreateProcessA(
None, args, None, None, False, _CREATE_NO_WINDOW,
@@ -447,8 +398,7 @@
if not res:
raise ctypes.WinError()
- # _tochildpid because the process is the child of COMSPEC
- return _tochildpid(pi.dwProcessId)
+ return pi.dwProcessId
def unlink(f):
'''try to implement POSIX' unlink semantics on Windows'''
--- a/mercurial/windows.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/windows.py Tue Jun 20 16:33:46 2017 -0400
@@ -17,7 +17,7 @@
from .i18n import _
from . import (
encoding,
- osutil,
+ policy,
pycompat,
win32,
)
@@ -28,6 +28,8 @@
except ImportError:
import winreg
+osutil = policy.importmod(r'osutil')
+
executablepath = win32.executablepath
getuser = win32.getuser
hidewindow = win32.hidewindow
@@ -136,6 +138,9 @@
# convert to a friendlier exception
raise IOError(err.errno, '%s: %s' % (name, err.strerror))
+# may be wrapped by win32mbcs extension
+listdir = osutil.listdir
+
class winstdout(object):
'''stdout on windows misbehaves if sent through a pipe'''
@@ -175,7 +180,6 @@
except IOError as inst:
if inst.errno != errno.EINVAL:
raise
- self.close()
raise IOError(errno.EPIPE, 'Broken pipe')
def _is_win_9x():
@@ -331,7 +335,7 @@
return executable
return findexisting(os.path.expanduser(os.path.expandvars(command)))
-_wantedkinds = set([stat.S_IFREG, stat.S_IFLNK])
+_wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
def statfiles(files):
'''Stat each file in files. Yield each stat, or None if a file
@@ -349,7 +353,7 @@
if cache is None:
try:
dmap = dict([(normcase(n), s)
- for n, k, s in osutil.listdir(dir, True)
+ for n, k, s in listdir(dir, True)
if getkind(s.st_mode) in _wantedkinds])
except OSError as err:
# Python >= 2.5 returns ENOENT and adds winerror field
@@ -376,7 +380,7 @@
def removedirs(name):
"""special version of os.removedirs that does not remove symlinked
directories or junction points if they actually contain files"""
- if osutil.listdir(name):
+ if listdir(name):
return
os.rmdir(name)
head, tail = os.path.split(name)
@@ -384,7 +388,7 @@
head, tail = os.path.split(head)
while head and tail:
try:
- if osutil.listdir(head):
+ if listdir(head):
return
os.rmdir(head)
except (ValueError, OSError):
--- a/mercurial/wireproto.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/wireproto.py Tue Jun 20 16:33:46 2017 -0400
@@ -16,6 +16,7 @@
from .node import (
bin,
hex,
+ nullid,
)
from . import (
@@ -410,7 +411,7 @@
remote server as a bundle.
When pushing a bundle10 stream, return an integer indicating the
- result of the push (see localrepository.addchangegroup()).
+ result of the push (see changegroup.apply()).
When pushing a bundle20 stream, return a bundle20 stream.
@@ -753,12 +754,12 @@
"""
# copy to prevent modification of the global list
caps = list(wireprotocaps)
- if streamclone.allowservergeneration(repo.ui):
+ if streamclone.allowservergeneration(repo):
if repo.ui.configbool('server', 'preferuncompressed', False):
caps.append('stream-preferred')
requiredformats = repo.requirements & repo.supportedformats
# if our local revlogs are just revlogv1, add 'stream' cap
- if not requiredformats - set(('revlogv1',)):
+ if not requiredformats - {'revlogv1'}:
caps.append('stream')
# otherwise, add 'streamreqs' detailing our local revlog format
else:
@@ -841,6 +842,17 @@
hint=bundle2requiredhint)
try:
+ if repo.ui.configbool('server', 'disablefullbundle', False):
+ # Check to see if this is a full clone.
+ clheads = set(repo.changelog.heads())
+ heads = set(opts.get('heads', set()))
+ common = set(opts.get('common', set()))
+ common.discard(nullid)
+ if not common and clheads == heads:
+ raise error.Abort(
+ _('server has pull-based clones disabled'),
+ hint=_('remove --pull if specified or upgrade Mercurial'))
+
chunks = exchange.getbundlechunks(repo, 'serve', **opts)
except error.Abort as exc:
# cleanly forward Abort error to the client
@@ -934,7 +946,7 @@
capability with a value representing the version and flags of the repo
it is serving. Client checks to see if it understands the format.
'''
- if not streamclone.allowservergeneration(repo.ui):
+ if not streamclone.allowservergeneration(repo):
return '1\n'
def getstream(it):
--- a/mercurial/worker.py Tue Jun 13 22:24:41 2017 -0400
+++ b/mercurial/worker.py Tue Jun 20 16:33:46 2017 -0400
@@ -26,7 +26,7 @@
# posix
try:
- n = int(os.sysconf('SC_NPROCESSORS_ONLN'))
+ n = int(os.sysconf(r'SC_NPROCESSORS_ONLN'))
if n > 0:
return n
except (AttributeError, ValueError):
@@ -134,37 +134,43 @@
killworkers()
oldchldhandler = signal.signal(signal.SIGCHLD, sigchldhandler)
ui.flush()
+ parentpid = os.getpid()
for pargs in partition(args, workers):
- pid = os.fork()
- if pid == 0:
- signal.signal(signal.SIGINT, oldhandler)
- signal.signal(signal.SIGCHLD, oldchldhandler)
-
- def workerfunc():
- os.close(rfd)
- for i, item in func(*(staticargs + (pargs,))):
- os.write(wfd, '%d %s\n' % (i, item))
- return 0
+ # make sure we use os._exit in all worker code paths. otherwise the
+ # worker may do some clean-ups which could cause surprises like
+ # deadlock. see sshpeer.cleanup for example.
+ # override error handling *before* fork. this is necessary because
+ # exception (signal) may arrive after fork, before "pid =" assignment
+ # completes, and other exception handler (dispatch.py) can lead to
+ # unexpected code path without os._exit.
+ ret = -1
+ try:
+ pid = os.fork()
+ if pid == 0:
+ signal.signal(signal.SIGINT, oldhandler)
+ signal.signal(signal.SIGCHLD, oldchldhandler)
- # make sure we use os._exit in all code paths. otherwise the worker
- # may do some clean-ups which could cause surprises like deadlock.
- # see sshpeer.cleanup for example.
- ret = 0
- try:
+ def workerfunc():
+ os.close(rfd)
+ for i, item in func(*(staticargs + (pargs,))):
+ os.write(wfd, '%d %s\n' % (i, item))
+ return 0
+
+ ret = scmutil.callcatch(ui, workerfunc)
+ except: # parent re-raises, child never returns
+ if os.getpid() == parentpid:
+ raise
+ exctype = sys.exc_info()[0]
+ force = not issubclass(exctype, KeyboardInterrupt)
+ ui.traceback(force=force)
+ finally:
+ if os.getpid() != parentpid:
try:
- ret = scmutil.callcatch(ui, workerfunc)
- finally:
ui.flush()
- except KeyboardInterrupt:
- os._exit(255)
- except: # never return, therefore no re-raises
- try:
- ui.traceback(force=True)
- ui.flush()
+ except: # never returns, no re-raises
+ pass
finally:
- os._exit(255)
- else:
- os._exit(ret & 255)
+ os._exit(ret & 255)
pids.add(pid)
os.close(wfd)
fp = os.fdopen(rfd, pycompat.sysstr('rb'), 0)
--- a/setup.py Tue Jun 13 22:24:41 2017 -0400
+++ b/setup.py Tue Jun 20 16:33:46 2017 -0400
@@ -5,8 +5,8 @@
# 'python setup.py --help' for more options
import sys, platform
-if getattr(sys, 'version_info', (0, 0, 0)) < (2, 6, 0, 'final'):
- raise SystemExit("Mercurial requires Python 2.6 or later.")
+if sys.version_info < (2, 7, 0, 'final'):
+ raise SystemExit('Mercurial requires Python 2.7 or later.')
if sys.version_info[0] >= 3:
printf = eval('print')
@@ -77,6 +77,7 @@
from distutils.command.build_ext import build_ext
from distutils.command.build_py import build_py
from distutils.command.build_scripts import build_scripts
+from distutils.command.install import install
from distutils.command.install_lib import install_lib
from distutils.command.install_scripts import install_scripts
from distutils.spawn import spawn, find_executable
@@ -142,17 +143,10 @@
py2exeloaded = False
def runcmd(cmd, env):
- if (sys.platform == 'plan9'
- and (sys.version_info[0] == 2 and sys.version_info[1] < 7)):
- # subprocess kludge to work around issues in half-baked Python
- # ports, notably bichued/python:
- _, out, err = os.popen3(cmd)
- return str(out), str(err)
- else:
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE, env=env)
- out, err = p.communicate()
- return out, err
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE, env=env)
+ out, err = p.communicate()
+ return out, err
def runhg(cmd, env):
out, err = runcmd(cmd, env)
@@ -182,9 +176,8 @@
if 'LD_LIBRARY_PATH' in os.environ:
env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
if 'SystemRoot' in os.environ:
- # Copy SystemRoot into the custom environment for Python 2.6
- # under Windows. Otherwise, the subprocess will fail with
- # error 0xc0150004. See: http://bugs.python.org/issue3440
+ # SystemRoot is required by Windows to load various DLLs. See:
+ # https://bugs.python.org/issue13524#msg148850
env['SystemRoot'] = os.environ['SystemRoot']
if os.path.isdir('.hg'):
@@ -353,15 +346,15 @@
self.distribution.ext_modules = []
elif self.distribution.cffi:
from mercurial.cffi import (
- bdiff,
- mpatch,
+ bdiffbuild,
+ mpatchbuild,
)
- exts = [mpatch.ffi.distutils_extension(),
- bdiff.ffi.distutils_extension()]
+ exts = [mpatchbuild.ffi.distutils_extension(),
+ bdiffbuild.ffi.distutils_extension()]
# cffi modules go here
if sys.platform == 'darwin':
- from mercurial.cffi import osutil
- exts.append(osutil.ffi.distutils_extension())
+ from mercurial.cffi import osutilbuild
+ exts.append(osutilbuild.ffi.distutils_extension())
self.distribution.ext_modules = exts
else:
h = os.path.join(get_python_inc(), 'Python.h')
@@ -370,11 +363,17 @@
'Mercurial but weren\'t found in %s' % h)
def run(self):
+ basepath = os.path.join(self.build_lib, 'mercurial')
+ self.mkpath(basepath)
+
if self.distribution.pure:
modulepolicy = 'py'
+ elif self.build_lib == '.':
+ # in-place build should run without rebuilding C extensions
+ modulepolicy = 'allow'
else:
modulepolicy = 'c'
- with open("mercurial/__modulepolicy__.py", "w") as f:
+ with open(os.path.join(basepath, '__modulepolicy__.py'), "w") as f:
f.write('# this file is autogenerated by setup.py\n')
f.write('modulepolicy = b"%s"\n' % modulepolicy)
@@ -461,6 +460,25 @@
dir = os.path.dirname(self.get_ext_fullpath('dummy'))
return os.path.join(self.build_temp, dir, 'hg.exe')
+class hginstall(install):
+
+ user_options = install.user_options + [
+ ('old-and-unmanageable', None,
+ 'noop, present for eggless setuptools compat'),
+ ('single-version-externally-managed', None,
+ 'noop, present for eggless setuptools compat'),
+ ]
+
+ # Also helps setuptools not be sad while we refuse to create eggs.
+ single_version_externally_managed = True
+
+ def get_sub_commands(self):
+ # Screen out egg related commands to prevent egg generation. But allow
+ # mercurial.egg-info generation, since that is part of modern
+ # packaging.
+ excl = {'bdist_egg'}
+ return filter(lambda x: x not in excl, install.get_sub_commands(self))
+
class hginstalllib(install_lib):
'''
This is a specialization of install_lib that replaces the copy_file used
@@ -572,20 +590,27 @@
'build_py': hgbuildpy,
'build_scripts': hgbuildscripts,
'build_hgextindex': buildhgextindex,
+ 'install': hginstall,
'install_lib': hginstalllib,
'install_scripts': hginstallscripts,
'build_hgexe': buildhgexe,
}
-packages = ['mercurial', 'mercurial.hgweb', 'mercurial.httpclient',
+packages = ['mercurial',
+ 'mercurial.cext',
+ 'mercurial.cffi',
+ 'mercurial.hgweb',
+ 'mercurial.httpclient',
'mercurial.pure',
'hgext', 'hgext.convert', 'hgext.fsmonitor',
'hgext.fsmonitor.pywatchman', 'hgext.highlight',
- 'hgext.largefiles', 'hgext.zeroconf', 'hgext3rd']
+ 'hgext.largefiles', 'hgext.zeroconf', 'hgext3rd',
+ 'hgdemandimport']
common_depends = ['mercurial/bitmanipulation.h',
'mercurial/compat.h',
- 'mercurial/util.h']
+ 'mercurial/cext/util.h']
+common_include_dirs = ['mercurial']
osutil_cflags = []
osutil_ldflags = []
@@ -614,22 +639,29 @@
osutil_ldflags += ['-framework', 'ApplicationServices']
extmodules = [
- Extension('mercurial.base85', ['mercurial/base85.c'],
+ Extension('mercurial.cext.base85', ['mercurial/cext/base85.c'],
+ include_dirs=common_include_dirs,
depends=common_depends),
- Extension('mercurial.bdiff', ['mercurial/bdiff.c',
- 'mercurial/bdiff_module.c'],
+ Extension('mercurial.cext.bdiff', ['mercurial/bdiff.c',
+ 'mercurial/cext/bdiff.c'],
+ include_dirs=common_include_dirs,
depends=common_depends + ['mercurial/bdiff.h']),
- Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c'],
+ Extension('mercurial.cext.diffhelpers', ['mercurial/cext/diffhelpers.c'],
+ include_dirs=common_include_dirs,
depends=common_depends),
- Extension('mercurial.mpatch', ['mercurial/mpatch.c',
- 'mercurial/mpatch_module.c'],
+ Extension('mercurial.cext.mpatch', ['mercurial/mpatch.c',
+ 'mercurial/cext/mpatch.c'],
+ include_dirs=common_include_dirs,
depends=common_depends),
- Extension('mercurial.parsers', ['mercurial/dirs.c',
- 'mercurial/manifest.c',
- 'mercurial/parsers.c',
- 'mercurial/pathencode.c'],
+ Extension('mercurial.cext.parsers', ['mercurial/cext/dirs.c',
+ 'mercurial/cext/manifest.c',
+ 'mercurial/cext/parsers.c',
+ 'mercurial/cext/pathencode.c',
+ 'mercurial/cext/revlog.c'],
+ include_dirs=common_include_dirs,
depends=common_depends),
- Extension('mercurial.osutil', ['mercurial/osutil.c'],
+ Extension('mercurial.cext.osutil', ['mercurial/cext/osutil.c'],
+ include_dirs=common_include_dirs,
extra_compile_args=osutil_cflags,
extra_link_args=osutil_ldflags,
depends=common_depends),
@@ -664,6 +696,23 @@
class HackedMingw32CCompiler(object):
pass
+if os.name == 'nt':
+ # Allow compiler/linker flags to be added to Visual Studio builds. Passing
+ # extra_link_args to distutils.extensions.Extension() doesn't have any
+ # effect.
+ from distutils import msvccompiler
+
+ compiler = msvccompiler.MSVCCompiler
+
+ class HackedMSVCCompiler(msvccompiler.MSVCCompiler):
+ def initialize(self):
+ compiler.initialize(self)
+ # "warning LNK4197: export 'func' specified multiple times"
+ self.ldflags_shared.append('/ignore:4197')
+ self.ldflags_shared_debug.append('/ignore:4197')
+
+ msvccompiler.MSVCCompiler = HackedMSVCCompiler
+
packagedata = {'mercurial': ['locale/*/LC_MESSAGES/hg.mo',
'help/*.txt',
'help/internals/*.txt',
@@ -779,7 +828,7 @@
package_data=packagedata,
cmdclass=cmdclass,
distclass=hgdist,
- options={'py2exe': {'packages': ['hgext', 'email']},
+ options={'py2exe': {'packages': ['hgdemandimport', 'hgext', 'email']},
'bdist_mpkg': {'zipdist': False,
'license': 'COPYING',
'readme': 'contrib/macosx/Readme.html',
--- a/tests/autodiff.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/autodiff.py Tue Jun 20 16:33:46 2017 -0400
@@ -3,14 +3,14 @@
from __future__ import absolute_import
from mercurial import (
- cmdutil,
error,
patch,
+ registrar,
scmutil,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
@command('autodiff',
[('', 'git', '', 'git upgrade mode (yes/no/auto/warn/abort)')],
--- a/tests/blacklists/fsmonitor Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/blacklists/fsmonitor Tue Jun 20 16:33:46 2017 -0400
@@ -1,7 +1,5 @@
# Blacklist for a full testsuite run with fsmonitor enabled.
-# Use with
-# run-tests --blacklist=blacklists/fsmonitor \
-# --extra-config="extensions.fsmonitor="
+# Used by fsmonitor-run-tests.
# The following tests all fail because they either use extensions that conflict
# with fsmonitor, use subrepositories, or don't anticipate the extra file in
# the .hg directory that fsmonitor adds.
--- a/tests/check-perf-code.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/check-perf-code.py Tue Jun 20 16:33:46 2017 -0400
@@ -26,7 +26,7 @@
replacement = [('.py', ''), ('.c', ''), # trim suffix
('mercurial%s' % (os.sep), ''), # trim "mercurial/" path
]
- ignored = set(['__init__'])
+ ignored = {'__init__'}
modules = {}
# convert from file name to module name, and count # of appearances
--- a/tests/drawdag.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/drawdag.py Tue Jun 20 16:33:46 2017 -0400
@@ -80,16 +80,16 @@
from mercurial.i18n import _
from mercurial import (
- cmdutil,
context,
error,
node,
+ registrar,
scmutil,
tags as tagsmod,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
_pipechars = '\\/+-|'
_nonpipechars = ''.join(chr(i) for i in xrange(33, 127)
@@ -145,7 +145,7 @@
def parents(y, x):
"""(int, int) -> [str]. follow the ASCII edges at given position,
return a list of parents"""
- visited = set([(y, x)])
+ visited = {(y, x)}
visit = []
result = []
@@ -214,6 +214,9 @@
def data(self):
return self._data
+ def filenode(self):
+ return None
+
def path(self):
return self._path
--- a/tests/f Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/f Tue Jun 20 16:33:46 2017 -0400
@@ -51,7 +51,7 @@
if opts.type:
facts.append('file')
if opts.hexdump or opts.dump or opts.md5:
- content = file(f, 'rb').read()
+ content = open(f, 'rb').read()
elif islink:
if opts.type:
facts.append('link')
--- a/tests/fakedirstatewritetime.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/fakedirstatewritetime.py Tue Jun 20 16:33:46 2017 -0400
@@ -2,7 +2,7 @@
# specified by '[fakedirstatewritetime] fakenow', only when
# 'dirstate.write()' is invoked via functions below:
#
-# - 'workingctx._checklookup()' (= 'repo.status()')
+# - 'workingctx._poststatusfixup()' (= 'repo.status()')
# - 'committablectx.markcommitted()'
from __future__ import absolute_import
@@ -11,10 +11,12 @@
context,
dirstate,
extensions,
- parsers,
+ policy,
util,
)
+parsers = policy.importmod(r'parsers')
+
def pack_dirstate(fakenow, orig, dmap, copymap, pl, now):
# execute what original parsers.pack_dirstate should do actually
# for consistency
@@ -53,16 +55,16 @@
parsers.pack_dirstate = orig_pack_dirstate
dirstate._getfsnow = orig_dirstate_getfsnow
-def _checklookup(orig, workingctx, files):
+def _poststatusfixup(orig, workingctx, status, fixup):
ui = workingctx.repo().ui
- return fakewrite(ui, lambda : orig(workingctx, files))
+ return fakewrite(ui, lambda : orig(workingctx, status, fixup))
def markcommitted(orig, committablectx, node):
ui = committablectx.repo().ui
return fakewrite(ui, lambda : orig(committablectx, node))
def extsetup(ui):
- extensions.wrapfunction(context.workingctx, '_checklookup',
- _checklookup)
+ extensions.wrapfunction(context.workingctx, '_poststatusfixup',
+ _poststatusfixup)
extensions.wrapfunction(context.committablectx, 'markcommitted',
markcommitted)
--- a/tests/fakemergerecord.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/fakemergerecord.py Tue Jun 20 16:33:46 2017 -0400
@@ -5,12 +5,12 @@
from __future__ import absolute_import
from mercurial import (
- cmdutil,
merge,
+ registrar,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
@command('fakemergerecord',
[('X', 'mandatory', None, 'add a fake mandatory record'),
--- a/tests/filterpyflakes.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/filterpyflakes.py Tue Jun 20 16:33:46 2017 -0400
@@ -11,8 +11,11 @@
for line in sys.stdin:
# We blacklist tests that are too noisy for us
pats = [
- r"undefined name '(WindowsError|memoryview)'",
+ r"undefined name 'WindowsError'",
r"redefinition of unused '[^']+' from line",
+ # for cffi, allow re-exports from pure.*
+ r"cffi/[^:]*:.*\bimport \*' used",
+ r"cffi/[^:]*:.*\*' imported but unused",
]
keep = True
@@ -33,7 +36,6 @@
sys.stdout.write(line)
print()
-# self test of "undefined name" detection for other than 'memoryview'
+# self test of "undefined name" detection
if False:
- print(memoryview)
print(undefinedname)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/fsmonitor-run-tests.py Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,133 @@
+#!/usr/bin/env python
+
+# fsmonitor-run-tests.py - Run Mercurial tests with fsmonitor enabled
+#
+# Copyright 2017 Facebook, Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+#
+# This is a wrapper around run-tests.py that spins up an isolated instance of
+# Watchman and runs the Mercurial tests against it. This ensures that the global
+# version of Watchman isn't affected by anything this test does.
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import argparse
+import contextlib
+import json
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+import uuid
+
+osenvironb = getattr(os, 'environb', os.environ)
+
+if sys.version_info > (3, 5, 0):
+ PYTHON3 = True
+ xrange = range # we use xrange in one place, and we'd rather not use range
+ def _bytespath(p):
+ return p.encode('utf-8')
+
+elif sys.version_info >= (3, 0, 0):
+ print('%s is only supported on Python 3.5+ and 2.7, not %s' %
+ (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3])))
+ sys.exit(70) # EX_SOFTWARE from `man 3 sysexits`
+else:
+ PYTHON3 = False
+
+ # In python 2.x, path operations are generally done using
+ # bytestrings by default, so we don't have to do any extra
+ # fiddling there. We define the wrapper functions anyway just to
+ # help keep code consistent between platforms.
+ def _bytespath(p):
+ return p
+
+def getparser():
+ """Obtain the argument parser used by the CLI."""
+ parser = argparse.ArgumentParser(
+ description='Run tests with fsmonitor enabled.',
+ epilog='Unrecognized options are passed to run-tests.py.')
+ # - keep these sorted
+ # - none of these options should conflict with any in run-tests.py
+ parser.add_argument('--keep-fsmonitor-tmpdir', action='store_true',
+ help='keep temporary directory with fsmonitor state')
+ parser.add_argument('--watchman',
+ help='location of watchman binary (default: watchman in PATH)',
+ default='watchman')
+
+ return parser
+
+@contextlib.contextmanager
+def watchman(args):
+ basedir = tempfile.mkdtemp(prefix='hg-fsmonitor')
+ try:
+ # Much of this configuration is borrowed from Watchman's test harness.
+ cfgfile = os.path.join(basedir, 'config.json')
+ # TODO: allow setting a config
+ with open(cfgfile, 'w') as f:
+ f.write(json.dumps({}))
+
+ logfile = os.path.join(basedir, 'log')
+ clilogfile = os.path.join(basedir, 'cli-log')
+ if os.name == 'nt':
+ sockfile = '\\\\.\\pipe\\watchman-test-%s' % uuid.uuid4().hex
+ else:
+ sockfile = os.path.join(basedir, 'sock')
+ pidfile = os.path.join(basedir, 'pid')
+ statefile = os.path.join(basedir, 'state')
+
+ argv = [
+ args.watchman,
+ '--sockname', sockfile,
+ '--logfile', logfile,
+ '--pidfile', pidfile,
+ '--statefile', statefile,
+ '--foreground',
+ '--log-level=2', # debug logging for watchman
+ ]
+
+ envb = osenvironb.copy()
+ envb[b'WATCHMAN_CONFIG_FILE'] = _bytespath(cfgfile)
+ with open(clilogfile, 'wb') as f:
+ proc = subprocess.Popen(
+ argv, env=envb, stdin=None, stdout=f, stderr=f)
+ try:
+ yield sockfile
+ finally:
+ proc.terminate()
+ proc.kill()
+ finally:
+ if args.keep_fsmonitor_tmpdir:
+ print('fsmonitor dir available at %s' % basedir)
+ else:
+ shutil.rmtree(basedir, ignore_errors=True)
+
+def run():
+ parser = getparser()
+ args, runtestsargv = parser.parse_known_args()
+
+ with watchman(args) as sockfile:
+ osenvironb[b'WATCHMAN_SOCK'] = _bytespath(sockfile)
+ # Indicate to hghave that we're running with fsmonitor enabled.
+ osenvironb[b'HGFSMONITOR_TESTS'] = b'1'
+
+ runtestdir = os.path.dirname(__file__)
+ runtests = os.path.join(runtestdir, 'run-tests.py')
+ blacklist = os.path.join(runtestdir, 'blacklists', 'fsmonitor')
+
+ runtestsargv.insert(0, runtests)
+ runtestsargv.extend([
+ '--extra-config',
+ 'extensions.fsmonitor=',
+ '--blacklist',
+ blacklist,
+ ])
+
+ return subprocess.call(runtestsargv)
+
+if __name__ == '__main__':
+ sys.exit(run())
--- a/tests/generate-working-copy-states.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/generate-working-copy-states.py Tue Jun 20 16:33:46 2017 -0400
@@ -47,7 +47,7 @@
content in parentcontents]) + "-" + tracked
yield (filename, parentcontents)
else:
- for content in (set([None, 'content' + str(depth + 1)]) |
+ for content in ({None, 'content' + str(depth + 1)} |
set(parentcontents)):
for combination in generatestates(maxchangesets,
parentcontents + [content]):
--- a/tests/hghave.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/hghave.py Tue Jun 20 16:33:46 2017 -0400
@@ -278,6 +278,17 @@
def has_git():
return matchoutput('git --version 2>&1', br'^git version')
+def getgitversion():
+ m = matchoutput('git --version 2>&1', br'git version (\d+)\.(\d+)')
+ if not m:
+ return (0, 0)
+ return (int(m.group(1)), int(m.group(2)))
+
+@checkvers("git", "git client (with ext::sh support) version >= %s", (1.9,))
+def has_git_range(v):
+ major, minor = v.split('.')[0:2]
+ return getgitversion() >= (int(major), int(minor))
+
@check("docutils", "Docutils text processing library")
def has_docutils():
try:
@@ -502,7 +513,7 @@
@check("serve", "platform and python can manage 'hg serve -d'")
def has_serve():
- return os.name != 'nt' # gross approximation
+ return True
@check("test-repo", "running tests from repository")
def has_test_repo():
@@ -580,10 +591,6 @@
from mercurial import util
return util.safehasattr(__future__, "absolute_import")
-@check("py27+", "running with Python 2.7+")
-def has_python27ornewer():
- return sys.version_info[0:2] >= (2, 7)
-
@check("py3k", "running with Python 3.x")
def has_py3k():
return 3 == sys.version_info[0]
@@ -609,7 +616,7 @@
os.environ.get("HGTEST_RUN_TESTS_PURE") == "--pure",
])
-@check("slow", "allow slow tests")
+@check("slow", "allow slow tests (use --allow-slow-tests)")
def has_slow():
return os.environ.get('HGTEST_SLOW') == 'slow'
@@ -638,3 +645,16 @@
@check("devfull", "/dev/full special file")
def has_dev_full():
return os.path.exists('/dev/full')
+
+@check("virtualenv", "Python virtualenv support")
+def has_virtualenv():
+ try:
+ import virtualenv
+ virtualenv.ACTIVATE_SH
+ return True
+ except ImportError:
+ return False
+
+@check("fsmonitor", "running tests with fsmonitor")
+def has_fsmonitor():
+ return 'HGFSMONITOR_TESTS' in os.environ
--- a/tests/killdaemons.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/killdaemons.py Tue Jun 20 16:33:46 2017 -0400
@@ -10,6 +10,26 @@
if os.name =='nt':
import ctypes
+ _BOOL = ctypes.c_long
+ _DWORD = ctypes.c_ulong
+ _UINT = ctypes.c_uint
+ _HANDLE = ctypes.c_void_p
+
+ ctypes.windll.kernel32.CloseHandle.argtypes = [_HANDLE]
+ ctypes.windll.kernel32.CloseHandle.restype = _BOOL
+
+ ctypes.windll.kernel32.GetLastError.argtypes = []
+ ctypes.windll.kernel32.GetLastError.restype = _DWORD
+
+ ctypes.windll.kernel32.OpenProcess.argtypes = [_DWORD, _BOOL, _DWORD]
+ ctypes.windll.kernel32.OpenProcess.restype = _HANDLE
+
+ ctypes.windll.kernel32.TerminateProcess.argtypes = [_HANDLE, _UINT]
+ ctypes.windll.kernel32.TerminateProcess.restype = _BOOL
+
+ ctypes.windll.kernel32.WaitForSingleObject.argtypes = [_HANDLE, _DWORD]
+ ctypes.windll.kernel32.WaitForSingleObject.restype = _DWORD
+
def _check(ret, expectederr=None):
if ret == 0:
winerrno = ctypes.GetLastError()
@@ -24,10 +44,11 @@
SYNCHRONIZE = 0x00100000
WAIT_OBJECT_0 = 0
WAIT_TIMEOUT = 258
+ WAIT_FAILED = _DWORD(0xFFFFFFFF).value
handle = ctypes.windll.kernel32.OpenProcess(
PROCESS_TERMINATE|SYNCHRONIZE|PROCESS_QUERY_INFORMATION,
False, pid)
- if handle == 0:
+ if handle is None:
_check(0, 87) # err 87 when process not found
return # process not found, already finished
try:
@@ -36,8 +57,8 @@
pass # terminated, but process handle still available
elif r == WAIT_TIMEOUT:
_check(ctypes.windll.kernel32.TerminateProcess(handle, -1))
- else:
- _check(r)
+ elif r == WAIT_FAILED:
+ _check(0) # err stored in GetLastError()
# TODO?: forcefully kill when timeout
# and ?shorter waiting time? when tryhard==True
@@ -47,8 +68,8 @@
pass # process is terminated
elif r == WAIT_TIMEOUT:
logfn('# Daemon process %d is stuck')
- else:
- _check(r) # any error
+ elif r == WAIT_FAILED:
+ _check(0) # err stored in GetLastError()
except: #re-raises
ctypes.windll.kernel32.CloseHandle(handle) # no _check, keep error
raise
@@ -78,18 +99,20 @@
logfn = lambda s: s
# Kill off any leftover daemon processes
try:
- fp = open(pidfile)
- for line in fp:
- try:
- pid = int(line)
- if pid <= 0:
- raise ValueError
- except ValueError:
- logfn('# Not killing daemon process %s - invalid pid'
- % line.rstrip())
- continue
+ pids = []
+ with open(pidfile) as fp:
+ for line in fp:
+ try:
+ pid = int(line)
+ if pid <= 0:
+ raise ValueError
+ except ValueError:
+ logfn('# Not killing daemon process %s - invalid pid'
+ % line.rstrip())
+ continue
+ pids.append(pid)
+ for pid in pids:
kill(pid, logfn, tryhard)
- fp.close()
if remove:
os.unlink(pidfile)
except IOError:
--- a/tests/md5sum.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/md5sum.py Tue Jun 20 16:33:46 2017 -0400
@@ -34,7 +34,7 @@
m = md5()
try:
- for data in iter(lambda: fp.read(8192), ''):
+ for data in iter(lambda: fp.read(8192), b''):
m.update(data)
except IOError as msg:
sys.stderr.write('%s: I/O error: %s\n' % (filename, msg))
--- a/tests/mockblackbox.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/mockblackbox.py Tue Jun 20 16:33:46 2017 -0400
@@ -3,8 +3,7 @@
util,
)
-def makedate():
- return 0, 0
+# XXX: we should probably offer a devel option to do this in blackbox directly
def getuser():
return 'bob'
def getpid():
@@ -12,6 +11,5 @@
# mock the date and user apis so the output is always the same
def uisetup(ui):
- util.makedate = makedate
util.getuser = getuser
util.getpid = getpid
--- a/tests/run-tests.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/run-tests.py Tue Jun 20 16:33:46 2017 -0400
@@ -58,11 +58,7 @@
import socket
import subprocess
import sys
-try:
- import sysconfig
-except ImportError:
- # sysconfig doesn't exist in Python 2.6
- sysconfig = None
+import sysconfig
import tempfile
import threading
import time
@@ -94,7 +90,7 @@
return p.decode('utf-8')
elif sys.version_info >= (3, 0, 0):
- print('%s is only supported on Python 3.5+ and 2.6-2.7, not %s' %
+ print('%s is only supported on Python 3.5+ and 2.7, not %s' %
(sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3])))
sys.exit(70) # EX_SOFTWARE from `man 3 sysexit`
else:
@@ -220,6 +216,22 @@
f.close()
return entries
+def parsettestcases(path):
+ """read a .t test file, return a set of test case names
+
+ If path does not exist, return an empty set.
+ """
+ cases = set()
+ try:
+ with open(path, 'rb') as f:
+ for l in f:
+ if l.startswith(b'#testcases '):
+ cases.update(l[11:].split())
+ except IOError as ex:
+ if ex.errno != errno.ENOENT:
+ raise
+ return cases
+
def getparser():
"""Obtain the OptionParser used by the CLI."""
parser = optparse.OptionParser("%prog [options] [tests]")
@@ -251,6 +263,8 @@
help="keep temporary directory after running tests")
parser.add_option("-k", "--keywords",
help="run tests matching keywords")
+ parser.add_option("--list-tests", action="store_true",
+ help="list tests instead of running them")
parser.add_option("-l", "--local", action="store_true",
help="shortcut for --with-hg=<testdir>/../hg, "
"and --with-chg=<testdir>/../contrib/chg/chg if --chg is set")
@@ -260,6 +274,8 @@
help="run each test N times (default=1)", default=1)
parser.add_option("-n", "--nodiff", action="store_true",
help="skip showing test changes")
+ parser.add_option("--outputdir", type="string",
+ help="directory to write error logs to (default=test directory)")
parser.add_option("-p", "--port", type="int",
help="port on which servers should listen"
" (default: $%s or %d)" % defaults['port'])
@@ -305,7 +321,7 @@
parser.add_option("--ipv6", action="store_true",
help="prefer IPv6 to IPv4 for network related tests")
parser.add_option("-3", "--py3k-warnings", action="store_true",
- help="enable Py3k warnings on Python 2.6+")
+ help="enable Py3k warnings on Python 2.7+")
# This option should be deleted once test-check-py3-compat.t and other
# Python 3 tests run with Python 3.
parser.add_option("--with-python3", metavar="PYTHON3",
@@ -345,7 +361,7 @@
if not (os.path.isfile(options.with_hg) and
os.access(options.with_hg, os.X_OK)):
parser.error('--with-hg must specify an executable hg script')
- if not os.path.basename(options.with_hg) == b'hg':
+ if os.path.basename(options.with_hg) not in [b'hg', b'hg.exe']:
sys.stderr.write('warning: --with-hg should specify an hg script\n')
if options.local:
testdir = os.path.dirname(_bytespath(canonpath(sys.argv[0])))
@@ -423,7 +439,7 @@
if options.py3k_warnings:
if PYTHON3:
parser.error(
- '--py3k-warnings can only be used on Python 2.6 and 2.7')
+ '--py3k-warnings can only be used on Python 2.7')
if options.with_python3:
if PYTHON3:
parser.error('--with-python3 cannot be used when executing with '
@@ -527,10 +543,10 @@
sys.stdout.flush()
def terminate(proc):
- """Terminate subprocess (with fallback for Python versions < 2.6)"""
+ """Terminate subprocess"""
vlog('# Terminating process %d' % proc.pid)
try:
- getattr(proc, 'terminate', lambda : os.kill(proc.pid, signal.SIGTERM))()
+ proc.terminate()
except OSError:
pass
@@ -550,7 +566,7 @@
# Status code reserved for skipped tests (used by hghave).
SKIPPED_STATUS = 80
- def __init__(self, path, tmpdir, keeptmpdir=False,
+ def __init__(self, path, outputdir, tmpdir, keeptmpdir=False,
debug=False,
timeout=defaults['timeout'],
startport=defaults['port'], extraconfigopts=None,
@@ -591,7 +607,9 @@
self.bname = os.path.basename(path)
self.name = _strpath(self.bname)
self._testdir = os.path.dirname(path)
- self.errpath = os.path.join(self._testdir, b'%s.err' % self.bname)
+ self._outputdir = outputdir
+ self._tmpname = os.path.basename(path)
+ self.errpath = os.path.join(self._outputdir, b'%s.err' % self.bname)
self._threadtmp = tmpdir
self._keeptmpdir = keeptmpdir
@@ -650,7 +668,7 @@
if e.errno != errno.EEXIST:
raise
- name = os.path.basename(self.path)
+ name = self._tmpname
self._testtmp = os.path.join(self._threadtmp, name)
os.mkdir(self._testtmp)
@@ -693,21 +711,12 @@
except KeyboardInterrupt:
self._aborted = True
raise
- except SkipTest as e:
+ except unittest.SkipTest as e:
result.addSkip(self, str(e))
# The base class will have already counted this as a
# test we "ran", but we want to exclude skipped tests
# from those we count towards those run.
result.testsRun -= 1
- except IgnoreTest as e:
- result.addIgnore(self, str(e))
- # As with skips, ignores also should be excluded from
- # the number of tests executed.
- result.testsRun -= 1
- except WarnTest as e:
- result.addWarn(self, str(e))
- except ReportedTest as e:
- pass
except self.failureException as e:
# This differs from unittest in that we don't capture
# the stack trace. This is for historical reasons and
@@ -771,11 +780,11 @@
self.fail('hg have failed checking for %s' % failed[-1])
else:
self._skipped = True
- raise SkipTest(missing[-1])
+ raise unittest.SkipTest(missing[-1])
elif ret == 'timeout':
self.fail('timed out')
elif ret is False:
- raise WarnTest('no result code from test')
+ self.fail('no result code from test')
elif out != self._refout:
# Diff generation may rely on written .err file.
if (ret != 0 or out != self._refout) and not self._skipped \
@@ -829,7 +838,7 @@
def _run(self, env):
# This should be implemented in child classes to run tests.
- raise SkipTest('unknown test type')
+ raise unittest.SkipTest('unknown test type')
def abort(self):
"""Terminate execution of this test."""
@@ -882,8 +891,7 @@
offset = '' if i == 0 else '%s' % i
env["HGPORT%s" % offset] = '%s' % (self._startport + i)
env = os.environ.copy()
- if sysconfig is not None:
- env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase')
+ env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase')
env['HGEMITWARNINGS'] = '1'
env['TESTTMP'] = self._testtmp
env['HOME'] = self._testtmp
@@ -938,12 +946,9 @@
hgrc.write(b'mergemarkers = detailed\n')
hgrc.write(b'promptecho = True\n')
hgrc.write(b'[defaults]\n')
- hgrc.write(b'backout = -d "0 0"\n')
- hgrc.write(b'commit = -d "0 0"\n')
- hgrc.write(b'shelve = --date "0 0"\n')
- hgrc.write(b'tag = -d "0 0"\n')
hgrc.write(b'[devel]\n')
hgrc.write(b'all-warnings = true\n')
+ hgrc.write(b'default-date = 0 0\n')
hgrc.write(b'[largefiles]\n')
hgrc.write(b'usercache = %s\n' %
(os.path.join(self._testtmp, b'.cache/largefiles')))
@@ -1060,6 +1065,19 @@
ESCAPEMAP = dict((bchr(i), br'\x%02x' % i) for i in range(256))
ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'})
+ def __init__(self, path, *args, **kwds):
+ # accept an extra "case" parameter
+ case = None
+ if 'case' in kwds:
+ case = kwds.pop('case')
+ self._case = case
+ self._allcases = parsettestcases(path)
+ super(TTest, self).__init__(path, *args, **kwds)
+ if case:
+ self.name = '%s (case %s)' % (self.name, _strpath(case))
+ self.errpath = b'%s.%s.err' % (self.errpath[:-4], case)
+ self._tmpname += b'-%s' % case
+
@property
def refpath(self):
return os.path.join(self._testdir, self.bname)
@@ -1115,6 +1133,20 @@
self._timeout = self._slowtimeout
return True, None
+ def _iftest(self, args):
+ # implements "#if"
+ reqs = []
+ for arg in args:
+ if arg.startswith(b'no-') and arg[3:] in self._allcases:
+ if arg[3:] == self._case:
+ return False
+ elif arg in self._allcases:
+ if arg != self._case:
+ return False
+ else:
+ reqs.append(arg)
+ return self._hghave(reqs)[0]
+
def _parsetest(self, lines):
# We generate a shell script which outputs unique markers to line
# up script results with our source. These markers include input
@@ -1172,7 +1204,7 @@
after.setdefault(pos, []).append(' !!! invalid #if\n')
if skipping is not None:
after.setdefault(pos, []).append(' !!! nested #if\n')
- skipping = not self._hghave(lsplit[1:])[0]
+ skipping = not self._iftest(lsplit[1:])
after.setdefault(pos, []).append(l)
elif l.startswith(b'#else'):
if skipping is None:
@@ -1445,18 +1477,6 @@
iolock = threading.RLock()
-class SkipTest(Exception):
- """Raised to indicate that a test is to be skipped."""
-
-class IgnoreTest(Exception):
- """Raised to indicate that a test is to be ignored."""
-
-class WarnTest(Exception):
- """Raised to indicate that a test warned."""
-
-class ReportedTest(Exception):
- """Raised to indicate that a test already reported."""
-
class TestResult(unittest._TextTestResult):
"""Holds results when executing via unittest."""
# Don't worry too much about accessing the non-public _TextTestResult.
@@ -1475,11 +1495,6 @@
# sense to map it into skip some day.
self.ignored = []
- # We have a custom "warned" result that isn't present in any Python
- # unittest implementation. It is very similar to failed. It may make
- # sense to map it into fail some day.
- self.warned = []
-
self.times = []
self._firststarttime = None
# Data stored for the benefit of generating xunit reports.
@@ -1534,19 +1549,6 @@
self.testsRun += 1
self.stream.flush()
- def addWarn(self, test, reason):
- self.warned.append((test, reason))
-
- if self._options.first:
- self.stop()
-
- with iolock:
- if self.showAll:
- self.stream.writeln('warned %s' % reason)
- else:
- self.stream.write('~')
- self.stream.flush()
-
def addOutputMismatch(self, test, ret, got, expected):
"""Record a mismatch in test output for a particular test."""
if self.shouldStop:
@@ -1571,10 +1573,8 @@
servefail, lines = getdiff(expected, got,
test.refpath, test.errpath)
if servefail:
- self.addFailure(
- test,
+ raise test.failureException(
'server failed to start (HGPORT=%s)' % test._startport)
- raise ReportedTest('server failed to start')
else:
self.stream.write('\n')
for line in lines:
@@ -1689,7 +1689,7 @@
def get():
num_tests[0] += 1
if getattr(test, 'should_reload', False):
- return self._loadtest(test.path, num_tests[0])
+ return self._loadtest(test, num_tests[0])
return test
if not os.path.exists(test.path):
result.addSkip(test, "Doesn't exist")
@@ -1731,6 +1731,8 @@
if not v:
channel = n
break
+ else:
+ raise ValueError('Could not find output channel')
channels[channel] = "=" + test.name[5:].split(".")[0]
try:
test(result)
@@ -1740,10 +1742,11 @@
except: # re-raises
done.put(('!', test, 'run-test raised an error, see traceback'))
raise
- try:
- channels[channel] = ''
- except IndexError:
- pass
+ finally:
+ try:
+ channels[channel] = ''
+ except IndexError:
+ pass
def stat():
count = 0
@@ -1787,7 +1790,7 @@
if getattr(test, 'should_reload', False):
num_tests[0] += 1
tests.append(
- self._loadtest(test.name, num_tests[0]))
+ self._loadtest(test, num_tests[0]))
else:
tests.append(test)
if self._jobs == 1:
@@ -1822,10 +1825,10 @@
# alphabetically, while times for each test are listed from oldest to
# newest.
-def loadtimes(testdir):
+def loadtimes(outputdir):
times = []
try:
- with open(os.path.join(testdir, b'.testtimes-')) as fp:
+ with open(os.path.join(outputdir, b'.testtimes-')) as fp:
for line in fp:
ts = line.split()
times.append((ts[0], [float(t) for t in ts[1:]]))
@@ -1834,8 +1837,8 @@
raise
return times
-def savetimes(testdir, result):
- saved = dict(loadtimes(testdir))
+def savetimes(outputdir, result):
+ saved = dict(loadtimes(outputdir))
maxruns = 5
skipped = set([str(t[0]) for t in result.skipped])
for tdata in result.times:
@@ -1846,11 +1849,11 @@
ts[:] = ts[-maxruns:]
fd, tmpname = tempfile.mkstemp(prefix=b'.testtimes',
- dir=testdir, text=True)
+ dir=outputdir, text=True)
with os.fdopen(fd, 'w') as fp:
for name, ts in sorted(saved.items()):
fp.write('%s %s\n' % (name, ' '.join(['%.3f' % (t,) for t in ts])))
- timepath = os.path.join(testdir, b'.testtimes')
+ timepath = os.path.join(outputdir, b'.testtimes')
try:
os.unlink(timepath)
except OSError:
@@ -1868,6 +1871,25 @@
self._runner = runner
+ def listtests(self, test):
+ result = TestResult(self._runner.options, self.stream,
+ self.descriptions, 0)
+ test = sorted(test, key=lambda t: t.name)
+ for t in test:
+ print(t.name)
+ result.addSuccess(t)
+
+ if self._runner.options.xunit:
+ with open(self._runner.options.xunit, "wb") as xuf:
+ self._writexunit(result, xuf)
+
+ if self._runner.options.json:
+ jsonpath = os.path.join(self._runner._outputdir, b'report.json')
+ with open(jsonpath, 'w') as fp:
+ self._writejson(result, fp)
+
+ return result
+
def run(self, test):
result = TestResult(self._runner.options, self.stream,
self.descriptions, self.verbosity)
@@ -1875,7 +1897,6 @@
test(result)
failed = len(result.failures)
- warned = len(result.warned)
skipped = len(result.skipped)
ignored = len(result.ignored)
@@ -1885,79 +1906,23 @@
if not self._runner.options.noskips:
for test, msg in result.skipped:
self.stream.writeln('Skipped %s: %s' % (test.name, msg))
- for test, msg in result.warned:
- self.stream.writeln('Warned %s: %s' % (test.name, msg))
for test, msg in result.failures:
self.stream.writeln('Failed %s: %s' % (test.name, msg))
for test, msg in result.errors:
self.stream.writeln('Errored %s: %s' % (test.name, msg))
if self._runner.options.xunit:
- with open(self._runner.options.xunit, 'wb') as xuf:
- timesd = dict((t[0], t[3]) for t in result.times)
- doc = minidom.Document()
- s = doc.createElement('testsuite')
- s.setAttribute('name', 'run-tests')
- s.setAttribute('tests', str(result.testsRun))
- s.setAttribute('errors', "0") # TODO
- s.setAttribute('failures', str(failed))
- s.setAttribute('skipped', str(skipped + ignored))
- doc.appendChild(s)
- for tc in result.successes:
- t = doc.createElement('testcase')
- t.setAttribute('name', tc.name)
- t.setAttribute('time', '%.3f' % timesd[tc.name])
- s.appendChild(t)
- for tc, err in sorted(result.faildata.items()):
- t = doc.createElement('testcase')
- t.setAttribute('name', tc)
- t.setAttribute('time', '%.3f' % timesd[tc])
- # createCDATASection expects a unicode or it will
- # convert using default conversion rules, which will
- # fail if string isn't ASCII.
- err = cdatasafe(err).decode('utf-8', 'replace')
- cd = doc.createCDATASection(err)
- t.appendChild(cd)
- s.appendChild(t)
- xuf.write(doc.toprettyxml(indent=' ', encoding='utf-8'))
+ with open(self._runner.options.xunit, "wb") as xuf:
+ self._writexunit(result, xuf)
if self._runner.options.json:
- jsonpath = os.path.join(self._runner._testdir, b'report.json')
+ jsonpath = os.path.join(self._runner._outputdir, b'report.json')
with open(jsonpath, 'w') as fp:
- timesd = {}
- for tdata in result.times:
- test = tdata[0]
- timesd[test] = tdata[1:]
-
- outcome = {}
- groups = [('success', ((tc, None)
- for tc in result.successes)),
- ('failure', result.failures),
- ('skip', result.skipped)]
- for res, testcases in groups:
- for tc, __ in testcases:
- if tc.name in timesd:
- diff = result.faildata.get(tc.name, b'')
- tres = {'result': res,
- 'time': ('%0.3f' % timesd[tc.name][2]),
- 'cuser': ('%0.3f' % timesd[tc.name][0]),
- 'csys': ('%0.3f' % timesd[tc.name][1]),
- 'start': ('%0.3f' % timesd[tc.name][3]),
- 'end': ('%0.3f' % timesd[tc.name][4]),
- 'diff': diff.decode('unicode_escape'),
- }
- else:
- # blacklisted test
- tres = {'result': res}
-
- outcome[tc.name] = tres
- jsonout = json.dumps(outcome, sort_keys=True, indent=4,
- separators=(',', ': '))
- fp.writelines(("testreport =", jsonout))
+ self._writejson(result, fp)
self._runner._checkhglib('Tested')
- savetimes(self._runner._testdir, result)
+ savetimes(self._runner._outputdir, result)
if failed and self._runner.options.known_good_rev:
def nooutput(args):
@@ -1993,14 +1958,14 @@
'%s %s by %s (%s)' % (
test, verb, dat['node'], dat['summary']))
self.stream.writeln(
- '# Ran %d tests, %d skipped, %d warned, %d failed.'
- % (result.testsRun,
- skipped + ignored, warned, failed))
+ '# Ran %d tests, %d skipped, %d failed.'
+ % (result.testsRun, skipped + ignored, failed))
if failed:
self.stream.writeln('python hash seed: %s' %
os.environ['PYTHONHASHSEED'])
if self._runner.options.time:
self.printtimes(result.times)
+ self.stream.flush()
return result
@@ -2016,6 +1981,95 @@
cuser, csys, real, start, end = tdata[1:6]
self.stream.writeln(cols % (start, end, cuser, csys, real, test))
+ @staticmethod
+ def _writexunit(result, outf):
+ # See http://llg.cubic.org/docs/junit/ for a reference.
+ timesd = dict((t[0], t[3]) for t in result.times)
+ doc = minidom.Document()
+ s = doc.createElement('testsuite')
+ s.setAttribute('name', 'run-tests')
+ s.setAttribute('tests', str(result.testsRun))
+ s.setAttribute('errors', "0") # TODO
+ s.setAttribute('failures', str(len(result.failures)))
+ s.setAttribute('skipped', str(len(result.skipped) +
+ len(result.ignored)))
+ doc.appendChild(s)
+ for tc in result.successes:
+ t = doc.createElement('testcase')
+ t.setAttribute('name', tc.name)
+ tctime = timesd.get(tc.name)
+ if tctime is not None:
+ t.setAttribute('time', '%.3f' % tctime)
+ s.appendChild(t)
+ for tc, err in sorted(result.faildata.items()):
+ t = doc.createElement('testcase')
+ t.setAttribute('name', tc)
+ tctime = timesd.get(tc)
+ if tctime is not None:
+ t.setAttribute('time', '%.3f' % tctime)
+ # createCDATASection expects a unicode or it will
+ # convert using default conversion rules, which will
+ # fail if string isn't ASCII.
+ err = cdatasafe(err).decode('utf-8', 'replace')
+ cd = doc.createCDATASection(err)
+ # Use 'failure' here instead of 'error' to match errors = 0,
+ # failures = len(result.failures) in the testsuite element.
+ failelem = doc.createElement('failure')
+ failelem.setAttribute('message', 'output changed')
+ failelem.setAttribute('type', 'output-mismatch')
+ failelem.appendChild(cd)
+ t.appendChild(failelem)
+ s.appendChild(t)
+ for tc, message in result.skipped:
+ # According to the schema, 'skipped' has no attributes. So store
+ # the skip message as a text node instead.
+ t = doc.createElement('testcase')
+ t.setAttribute('name', tc.name)
+ message = cdatasafe(message).decode('utf-8', 'replace')
+ cd = doc.createCDATASection(message)
+ skipelem = doc.createElement('skipped')
+ skipelem.appendChild(cd)
+ t.appendChild(skipelem)
+ s.appendChild(t)
+ outf.write(doc.toprettyxml(indent=' ', encoding='utf-8'))
+
+ @staticmethod
+ def _writejson(result, outf):
+ timesd = {}
+ for tdata in result.times:
+ test = tdata[0]
+ timesd[test] = tdata[1:]
+
+ outcome = {}
+ groups = [('success', ((tc, None)
+ for tc in result.successes)),
+ ('failure', result.failures),
+ ('skip', result.skipped)]
+ for res, testcases in groups:
+ for tc, __ in testcases:
+ if tc.name in timesd:
+ diff = result.faildata.get(tc.name, b'')
+ try:
+ diff = diff.decode('unicode_escape')
+ except UnicodeDecodeError as e:
+ diff = '%r decoding diff, sorry' % e
+ tres = {'result': res,
+ 'time': ('%0.3f' % timesd[tc.name][2]),
+ 'cuser': ('%0.3f' % timesd[tc.name][0]),
+ 'csys': ('%0.3f' % timesd[tc.name][1]),
+ 'start': ('%0.3f' % timesd[tc.name][3]),
+ 'end': ('%0.3f' % timesd[tc.name][4]),
+ 'diff': diff,
+ }
+ else:
+ # blacklisted test
+ tres = {'result': res}
+
+ outcome[tc.name] = tres
+ jsonout = json.dumps(outcome, sort_keys=True, indent=4,
+ separators=(',', ': '))
+ outf.writelines(("testreport =", jsonout))
+
class TestRunner(object):
"""Holds context for executing tests.
@@ -2024,7 +2078,6 @@
# Programs required to run tests.
REQUIREDTOOLS = [
- os.path.basename(_bytespath(sys.executable)),
b'diff',
b'grep',
b'unzip',
@@ -2043,6 +2096,7 @@
self.options = None
self._hgroot = None
self._testdir = None
+ self._outputdir = None
self._hgtmp = None
self._installdir = None
self._bindir = None
@@ -2067,11 +2121,11 @@
self.options = options
self._checktools()
- tests = self.findtests(args)
+ testdescs = self.findtests(args)
if options.profile_runner:
import statprof
statprof.start()
- result = self._run(tests)
+ result = self._run(testdescs)
if options.profile_runner:
statprof.stop()
statprof.display()
@@ -2080,9 +2134,9 @@
finally:
os.umask(oldmask)
- def _run(self, tests):
+ def _run(self, testdescs):
if self.options.random:
- random.shuffle(tests)
+ random.shuffle(testdescs)
else:
# keywords for slow tests
slow = {b'svn': 10,
@@ -2100,6 +2154,7 @@
perf = {}
def sortkey(f):
# run largest tests first, as they tend to take the longest
+ f = f['path']
try:
return perf[f]
except KeyError:
@@ -2117,10 +2172,14 @@
val /= 10.0
perf[f] = val / 1000.0
return perf[f]
- tests.sort(key=sortkey)
+ testdescs.sort(key=sortkey)
self._testdir = osenvironb[b'TESTDIR'] = getattr(
os, 'getcwdb', os.getcwd)()
+ if self.options.outputdir:
+ self._outputdir = canonpath(_bytespath(self.options.outputdir))
+ else:
+ self._outputdir = self._testdir
if 'PYTHONHASHSEED' not in os.environ:
# use a random python hash seed all the time
@@ -2245,9 +2304,10 @@
vlog("# Using HGTMP", self._hgtmp)
vlog("# Using PATH", os.environ["PATH"])
vlog("# Using", IMPL_PATH, osenvironb[IMPL_PATH])
+ vlog("# Writing to directory", self._outputdir)
try:
- return self._runtests(tests) or 0
+ return self._runtests(testdescs) or 0
finally:
time.sleep(.1)
self._cleanup()
@@ -2267,35 +2327,52 @@
else:
args = os.listdir(b'.')
- return [t for t in args
- if os.path.basename(t).startswith(b'test-')
- and (t.endswith(b'.py') or t.endswith(b'.t'))]
-
- def _runtests(self, tests):
- try:
- if self._installdir:
- self._installhg()
- self._checkhglib("Testing")
+ tests = []
+ for t in args:
+ if not (os.path.basename(t).startswith(b'test-')
+ and (t.endswith(b'.py') or t.endswith(b'.t'))):
+ continue
+ if t.endswith(b'.t'):
+ # .t file may contain multiple test cases
+ cases = sorted(parsettestcases(t))
+ if cases:
+ tests += [{'path': t, 'case': c} for c in sorted(cases)]
+ else:
+ tests.append({'path': t})
else:
- self._usecorrectpython()
- if self.options.chg:
- assert self._installdir
- self._installchg()
+ tests.append({'path': t})
+ return tests
+ def _runtests(self, testdescs):
+ def _reloadtest(test, i):
+ # convert a test back to its description dict
+ desc = {'path': test.path}
+ case = getattr(test, '_case', None)
+ if case:
+ desc['case'] = case
+ return self._gettest(desc, i)
+
+ try:
if self.options.restart:
- orig = list(tests)
- while tests:
- if os.path.exists(tests[0] + ".err"):
+ orig = list(testdescs)
+ while testdescs:
+ desc = testdescs[0]
+ # desc['path'] is a relative path
+ if 'case' in desc:
+ errpath = b'%s.%s.err' % (desc['path'], desc['case'])
+ else:
+ errpath = b'%s.err' % desc['path']
+ errpath = os.path.join(self._outputdir, errpath)
+ if os.path.exists(errpath):
break
- tests.pop(0)
- if not tests:
+ testdescs.pop(0)
+ if not testdescs:
print("running all tests")
- tests = orig
+ testdescs = orig
- tests = [self._gettest(t, i) for i, t in enumerate(tests)]
+ tests = [self._gettest(d, i) for i, d in enumerate(testdescs)]
failed = False
- warned = False
kws = self.options.keywords
if kws is not None and PYTHON3:
kws = kws.encode('utf-8')
@@ -2309,17 +2386,28 @@
loop=self.options.loop,
runs_per_test=self.options.runs_per_test,
showchannels=self.options.showchannels,
- tests=tests, loadtest=self._gettest)
+ tests=tests, loadtest=_reloadtest)
verbosity = 1
if self.options.verbose:
verbosity = 2
runner = TextTestRunner(self, verbosity=verbosity)
- result = runner.run(suite)
+
+ if self.options.list_tests:
+ result = runner.listtests(suite)
+ else:
+ if self._installdir:
+ self._installhg()
+ self._checkhglib("Testing")
+ else:
+ self._usecorrectpython()
+ if self.options.chg:
+ assert self._installdir
+ self._installchg()
+
+ result = runner.run(suite)
if result.failures:
failed = True
- if result.warned:
- warned = True
if self.options.anycoverage:
self._outputcoverage()
@@ -2329,8 +2417,6 @@
if failed:
return 1
- if warned:
- return 80
def _getport(self, count):
port = self._ports.get(count) # do we have a cached entry?
@@ -2350,13 +2436,14 @@
self._ports[count] = port
return port
- def _gettest(self, test, count):
+ def _gettest(self, testdesc, count):
"""Obtain a Test by looking at its filename.
Returns a Test instance. The Test may not be runnable if it doesn't
map to a known type.
"""
- lctest = test.lower()
+ path = testdesc['path']
+ lctest = path.lower()
testcls = Test
for ext, cls in self.TESTTYPES:
@@ -2364,10 +2451,13 @@
testcls = cls
break
- refpath = os.path.join(self._testdir, test)
+ refpath = os.path.join(self._testdir, path)
tmpdir = os.path.join(self._hgtmp, b'child%d' % count)
- t = testcls(refpath, tmpdir,
+ # extra keyword parameters. 'case' is used by .t tests
+ kwds = dict((k, testdesc[k]) for k in ['case'] if k in testdesc)
+
+ t = testcls(refpath, self._outputdir, tmpdir,
keeptmpdir=self.options.keep_tmpdir,
debug=self.options.debug,
timeout=self.options.timeout,
@@ -2377,7 +2467,7 @@
shell=self.options.shell,
hgcommand=self._hgcommand,
usechg=bool(self.options.with_chg or self.options.chg),
- useipv6=useipv6)
+ useipv6=useipv6, **kwds)
t.should_reload = True
return t
@@ -2621,10 +2711,10 @@
cov.report(ignore_errors=True, omit=omit)
if self.options.htmlcov:
- htmldir = os.path.join(self._testdir, 'htmlcov')
+ htmldir = os.path.join(self._outputdir, 'htmlcov')
cov.html_report(directory=htmldir, omit=omit)
if self.options.annotate:
- adir = os.path.join(self._testdir, 'annotated')
+ adir = os.path.join(self._outputdir, 'annotated')
if not os.path.isdir(adir):
os.mkdir(adir)
cov.annotate(directory=adir, omit=omit)
--- a/tests/test-acl.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-acl.t Tue Jun 20 16:33:46 2017 -0400
@@ -112,11 +112,11 @@
adding foo/file.txt revisions
adding quux/file.py revisions
added 3 changesets with 3 changes to 3 files
- updating the branch cache
bundle2-input-part: total payload size 1553
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-bundle: 3 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 2 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
@@ -147,7 +147,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -177,11 +176,11 @@
added 3 changesets with 3 changes to 3 files
calling hook pretxnchangegroup.acl: hgext.acl.hook
acl: changes have source "push" - skipping
- updating the branch cache
bundle2-input-part: total payload size 1553
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-bundle: 3 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 2 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
@@ -213,7 +212,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -253,11 +251,11 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
acl: path access granted: "911600dab2ae"
- updating the branch cache
bundle2-input-part: total payload size 1553
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-bundle: 3 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 2 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
@@ -289,7 +287,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -354,7 +351,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -424,7 +420,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -491,7 +486,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -563,7 +557,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -632,7 +625,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -703,7 +695,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -743,11 +734,11 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
acl: path access granted: "911600dab2ae"
- updating the branch cache
bundle2-input-part: total payload size 1553
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-bundle: 3 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 2 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
@@ -786,7 +777,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -864,7 +854,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -937,7 +926,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1021,7 +1009,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1061,11 +1048,11 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
acl: path access granted: "911600dab2ae"
- updating the branch cache
bundle2-input-part: total payload size 1553
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-bundle: 3 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 2 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
@@ -1107,7 +1094,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1147,11 +1133,11 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
acl: path access granted: "911600dab2ae"
- updating the branch cache
bundle2-input-part: total payload size 1553
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-bundle: 3 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 2 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
@@ -1189,7 +1175,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1265,7 +1250,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1306,11 +1290,11 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
acl: path access granted: "911600dab2ae"
- updating the branch cache
bundle2-input-part: total payload size 1553
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-bundle: 3 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 2 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
@@ -1348,7 +1332,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1508,13 +1491,13 @@
acl: path access granted: "911600dab2ae"
acl: branch access granted: "e8fc755d4d82" on branch "foobar"
acl: path access granted: "e8fc755d4d82"
- updating the branch cache
bundle2-input-part: total payload size 2068
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:e8fc755d4d8217ee5b0c2bb41558c40d43b92c01"
bundle2-input-bundle: 4 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 3 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
@@ -1804,13 +1787,13 @@
acl: path access granted: "911600dab2ae"
acl: branch access granted: "e8fc755d4d82" on branch "foobar"
acl: path access granted: "e8fc755d4d82"
- updating the branch cache
bundle2-input-part: total payload size 2068
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:e8fc755d4d8217ee5b0c2bb41558c40d43b92c01"
bundle2-input-bundle: 4 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 3 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
@@ -1897,13 +1880,13 @@
acl: path access granted: "911600dab2ae"
acl: branch access granted: "e8fc755d4d82" on branch "foobar"
acl: path access granted: "e8fc755d4d82"
- updating the branch cache
bundle2-input-part: total payload size 2068
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:e8fc755d4d8217ee5b0c2bb41558c40d43b92c01"
bundle2-input-bundle: 4 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 3 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
@@ -2058,13 +2041,13 @@
acl: path access granted: "911600dab2ae"
acl: branch access granted: "e8fc755d4d82" on branch "foobar"
acl: path access granted: "e8fc755d4d82"
- updating the branch cache
bundle2-input-part: total payload size 2068
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:e8fc755d4d8217ee5b0c2bb41558c40d43b92c01"
bundle2-input-bundle: 4 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 3 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
--- a/tests/test-add.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-add.t Tue Jun 20 16:33:46 2017 -0400
@@ -196,7 +196,6 @@
adding CapsDir1/CapsDir/SubDir/Def.txt (glob)
$ hg forget capsdir1/capsdir/abc.txt
- removing CapsDir1/CapsDir/AbC.txt (glob)
$ hg forget capsdir1/capsdir
removing CapsDir1/CapsDir/SubDir/Def.txt (glob)
@@ -232,7 +231,6 @@
+def
$ hg mv CapsDir1/CapsDir/abc.txt CapsDir1/CapsDir/ABC.txt
- moving CapsDir1/CapsDir/AbC.txt to CapsDir1/CapsDir/ABC.txt (glob)
$ hg ci -m "case changing rename" CapsDir1/CapsDir/AbC.txt CapsDir1/CapsDir/ABC.txt
$ hg status -A capsdir1/capsdir
--- a/tests/test-addremove-similar.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-addremove-similar.t Tue Jun 20 16:33:46 2017 -0400
@@ -57,7 +57,7 @@
should be sorted by path for stable result
- $ for i in `python $TESTDIR/seq.py 0 9`; do
+ $ for i in `$PYTHON $TESTDIR/seq.py 0 9`; do
> cp small-file $i
> done
$ rm small-file
@@ -88,7 +88,7 @@
pick one from many identical files
$ cp 0 a
- $ rm `python $TESTDIR/seq.py 0 9`
+ $ rm `$PYTHON $TESTDIR/seq.py 0 9`
$ hg addremove
removing 0
removing 1
@@ -107,11 +107,11 @@
pick one from many similar files
$ cp 0 a
- $ for i in `python $TESTDIR/seq.py 0 9`; do
+ $ for i in `$PYTHON $TESTDIR/seq.py 0 9`; do
> echo $i >> $i
> done
$ hg commit -m 'make them slightly different'
- $ rm `python $TESTDIR/seq.py 0 9`
+ $ rm `$PYTHON $TESTDIR/seq.py 0 9`
$ hg addremove -s50
removing 0
removing 1
--- a/tests/test-ancestor.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-ancestor.py Tue Jun 20 16:33:46 2017 -0400
@@ -13,10 +13,15 @@
ancestor,
debugcommands,
hg,
+ pycompat,
ui as uimod,
util,
)
+if pycompat.ispy3:
+ long = int
+ xrange = range
+
def buildgraph(rng, nodes=100, rootprob=0.05, mergeprob=0.2, prevprob=0.7):
'''nodes: total number of nodes in the graph
rootprob: probability that a new node (not 0) will be a root
@@ -37,7 +42,7 @@
p1 = i - 1
else:
p1 = rng.randrange(i - 1)
- p2 = rng.choice(range(0, p1) + range(p1 + 1, i))
+ p2 = rng.choice(list(range(0, p1)) + list(range(p1 + 1, i)))
graph[i] = [p1, p2]
elif rng.random() < prevprob:
graph[i] = [i - 1]
@@ -49,7 +54,7 @@
def buildancestorsets(graph):
ancs = [None] * len(graph)
for i in xrange(len(graph)):
- ancs[i] = set([i])
+ ancs[i] = {i}
if graph[i] == [nullrev]:
continue
for p in graph[i]:
@@ -220,7 +225,7 @@
def test_gca():
u = uimod.ui.load()
for i, dag in enumerate(dagtests):
- repo = hg.repository(u, 'gca%d' % i, create=1)
+ repo = hg.repository(u, b'gca%d' % i, create=1)
cl = repo.changelog
if not util.safehasattr(cl.index, 'ancestors'):
# C version not available
--- a/tests/test-annotate.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-annotate.t Tue Jun 20 16:33:46 2017 -0400
@@ -56,21 +56,18 @@
$ hg annotate -Tjson a
[
{
- "line": "a\n",
- "rev": 0
+ "abspath": "a",
+ "lines": [{"line": "a\n", "rev": 0}],
+ "path": "a"
}
]
$ hg annotate -Tjson -cdfnul a
[
{
- "date": [1.0, 0],
- "file": "a",
- "line": "a\n",
- "line_number": 1,
- "node": "8435f90966e442695d2ded29fdade2bac5ad8065",
- "rev": 0,
- "user": "nobody"
+ "abspath": "a",
+ "lines": [{"date": [1.0, 0], "file": "a", "line": "a\n", "line_number": 1, "node": "8435f90966e442695d2ded29fdade2bac5ad8065", "rev": 0, "user": "nobody"}],
+ "path": "a"
}
]
@@ -88,6 +85,37 @@
> EOF
$ hg ci -mb2 -d '2 0'
+annotate multiple files (JSON)
+
+ $ hg annotate -Tjson a b
+ [
+ {
+ "abspath": "a",
+ "lines": [{"line": "a\n", "rev": 0}, {"line": "a\n", "rev": 1}, {"line": "a\n", "rev": 1}],
+ "path": "a"
+ },
+ {
+ "abspath": "b",
+ "lines": [{"line": "a\n", "rev": 0}, {"line": "a\n", "rev": 1}, {"line": "a\n", "rev": 1}, {"line": "b4\n", "rev": 3}, {"line": "b5\n", "rev": 3}, {"line": "b6\n", "rev": 3}],
+ "path": "b"
+ }
+ ]
+
+annotate multiple files (template)
+
+ $ hg annotate -T'== {abspath} ==\n{lines % "{rev}: {line}"}' a b
+ == a ==
+ 0: a
+ 1: a
+ 1: a
+ == b ==
+ 0: a
+ 1: a
+ 1: a
+ 3: b4
+ 3: b5
+ 3: b6
+
annotate -n b
$ hg annotate -n b
@@ -217,6 +245,79 @@
3 b:5: b5
7 b:7: d
+--skip nothing (should be the same as no --skip at all)
+
+ $ hg annotate -nlf b --skip '1::0'
+ 0 a:1: a
+ 6 b:2: z
+ 1 a:3: a
+ 3 b:4: b4
+ 4 b:5: c
+ 3 b:5: b5
+ 7 b:7: d
+
+--skip a modified line. Note a slight behavior difference in pure - this is
+because the pure code comes up with slightly different deltas internally.
+
+ $ hg annotate -nlf b --skip 6
+ 0 a:1: a
+ 1 a:2: z (no-pure !)
+ 0 a:1: z (pure !)
+ 1 a:3: a
+ 3 b:4: b4
+ 4 b:5: c
+ 3 b:5: b5
+ 7 b:7: d
+
+--skip added lines (and test multiple skip)
+
+ $ hg annotate -nlf b --skip 3
+ 0 a:1: a
+ 6 b:2: z
+ 1 a:3: a
+ 1 a:3: b4
+ 4 b:5: c
+ 1 a:3: b5
+ 7 b:7: d
+
+ $ hg annotate -nlf b --skip 4
+ 0 a:1: a
+ 6 b:2: z
+ 1 a:3: a
+ 3 b:4: b4
+ 1 a:3: c
+ 3 b:5: b5
+ 7 b:7: d
+
+ $ hg annotate -nlf b --skip 3 --skip 4
+ 0 a:1: a
+ 6 b:2: z
+ 1 a:3: a
+ 1 a:3: b4
+ 1 a:3: c
+ 1 a:3: b5
+ 7 b:7: d
+
+ $ hg annotate -nlf b --skip 'merge()'
+ 0 a:1: a
+ 6 b:2: z
+ 1 a:3: a
+ 3 b:4: b4
+ 4 b:5: c
+ 3 b:5: b5
+ 3 b:5: d
+
+--skip everything -- use the revision the file was introduced in
+
+ $ hg annotate -nlf b --skip 'all()'
+ 0 a:1: a
+ 0 a:1: z
+ 0 a:1: a
+ 0 a:1: b4
+ 0 a:1: c
+ 0 a:1: b5
+ 0 a:1: d
+
Issue2807: alignment of line numbers with -l
$ echo more >> b
@@ -429,14 +530,9 @@
$ hg annotate -ncr "wdir()" -Tjson foo
[
{
- "line": "foo\n",
- "node": "472b18db256d1e8282064eab4bfdaf48cbfe83cd",
- "rev": 11
- },
- {
- "line": "foofoo\n",
- "node": null,
- "rev": null
+ "abspath": "foo",
+ "lines": [{"line": "foo\n", "node": "472b18db256d1e8282064eab4bfdaf48cbfe83cd", "rev": 11}, {"line": "foofoo\n", "node": null, "rev": null}],
+ "path": "foo"
}
]
@@ -671,6 +767,28 @@
hg: parse error: descend argument must be a boolean
[255]
+Test empty annotate output
+
+ $ printf '\0' > binary
+ $ touch empty
+ $ hg ci -qAm 'add binary and empty files'
+
+ $ hg annotate binary empty
+ binary: binary file
+
+ $ hg annotate -Tjson binary empty
+ [
+ {
+ "abspath": "binary",
+ "path": "binary"
+ },
+ {
+ "abspath": "empty",
+ "lines": [],
+ "path": "empty"
+ }
+ ]
+
Test annotate with whitespace options
$ cd ..
--- a/tests/test-archive.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-archive.t Tue Jun 20 16:33:46 2017 -0400
@@ -105,7 +105,7 @@
> except util.urlerr.httperror as e:
> sys.stderr.write(str(e) + '\n')
> EOF
- $ python getarchive.py "$TIP" gz | gunzip | tar tf - 2>/dev/null
+ $ $PYTHON getarchive.py "$TIP" gz | gunzip | tar tf - 2>/dev/null
test-archive-1701ef1f1510/.hg_archival.txt
test-archive-1701ef1f1510/.hgsub
test-archive-1701ef1f1510/.hgsubstate
@@ -113,7 +113,7 @@
test-archive-1701ef1f1510/baz/bletch
test-archive-1701ef1f1510/foo
test-archive-1701ef1f1510/subrepo/sub
- $ python getarchive.py "$TIP" bz2 | bunzip2 | tar tf - 2>/dev/null
+ $ $PYTHON getarchive.py "$TIP" bz2 | bunzip2 | tar tf - 2>/dev/null
test-archive-1701ef1f1510/.hg_archival.txt
test-archive-1701ef1f1510/.hgsub
test-archive-1701ef1f1510/.hgsubstate
@@ -121,7 +121,7 @@
test-archive-1701ef1f1510/baz/bletch
test-archive-1701ef1f1510/foo
test-archive-1701ef1f1510/subrepo/sub
- $ python getarchive.py "$TIP" zip > archive.zip
+ $ $PYTHON getarchive.py "$TIP" zip > archive.zip
$ unzip -t archive.zip
Archive: archive.zip
testing: test-archive-1701ef1f1510/.hg_archival.txt*OK (glob)
@@ -135,19 +135,19 @@
test that we can download single directories and files
- $ python getarchive.py "$TIP" gz baz | gunzip | tar tf - 2>/dev/null
+ $ $PYTHON getarchive.py "$TIP" gz baz | gunzip | tar tf - 2>/dev/null
test-archive-1701ef1f1510/baz/bletch
- $ python getarchive.py "$TIP" gz foo | gunzip | tar tf - 2>/dev/null
+ $ $PYTHON getarchive.py "$TIP" gz foo | gunzip | tar tf - 2>/dev/null
test-archive-1701ef1f1510/foo
test that we detect file patterns that match no files
- $ python getarchive.py "$TIP" gz foobar
+ $ $PYTHON getarchive.py "$TIP" gz foobar
HTTP Error 404: file(s) not found: foobar
test that we reject unsafe patterns
- $ python getarchive.py "$TIP" gz relre:baz
+ $ $PYTHON getarchive.py "$TIP" gz relre:baz
HTTP Error 404: file(s) not found: relre:baz
$ killdaemons.py
@@ -231,7 +231,7 @@
$ sleep 1
$ hg archive -t tgz tip.tar.gz
$ mv tip.tar.gz tip2.tar.gz
- $ python md5comp.py tip1.tar.gz tip2.tar.gz
+ $ $PYTHON md5comp.py tip1.tar.gz tip2.tar.gz
True
$ hg archive -t zip -p /illegal test.zip
@@ -364,12 +364,12 @@
$ hg -R repo archive --prefix tar-extracted archive.tar
$ (TZ=UTC-3; export TZ; tar xf archive.tar)
- $ python show_mtime.py tar-extracted/a
+ $ $PYTHON show_mtime.py tar-extracted/a
456789012
$ hg -R repo archive --prefix zip-extracted archive.zip
$ (TZ=UTC-3; export TZ; unzip -q archive.zip)
- $ python show_mtime.py zip-extracted/a
+ $ $PYTHON show_mtime.py zip-extracted/a
456789012
$ cd ..
--- a/tests/test-atomictempfile.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-atomictempfile.py Tue Jun 20 16:33:46 2017 -0400
@@ -47,7 +47,8 @@
# if a programmer screws up and passes bad args to atomictempfile, they
# get a plain ordinary TypeError, not infinite recursion
def testoops(self):
- self.assertRaises(TypeError, atomictempfile)
+ with self.assertRaises(TypeError):
+ atomictempfile()
# checkambig=True avoids ambiguity of timestamp
def testcheckambig(self):
--- a/tests/test-bad-extension.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-bad-extension.t Tue Jun 20 16:33:46 2017 -0400
@@ -62,9 +62,9 @@
names of extensions failed to load can be accessed via extensions.notloaded()
$ cat <<EOF > showbadexts.py
- > from mercurial import cmdutil, commands, extensions
+ > from mercurial import commands, extensions, registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> @command('showbadexts', norepo=True)
> def showbadexts(ui, *pats, **opts):
> ui.write('BADEXTS: %s\n' % ' '.join(sorted(extensions.notloaded())))
--- a/tests/test-bad-pull.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-bad-pull.t Tue Jun 20 16:33:46 2017 -0400
@@ -7,7 +7,7 @@
$ test -d copy
[1]
- $ python "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid
+ $ $PYTHON "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid
$ cat dumb.pid >> $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/foo copy2
abort: HTTP Error 404: * (glob)
--- a/tests/test-basic.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-basic.t Tue Jun 20 16:33:46 2017 -0400
@@ -1,11 +1,8 @@
Create a repository:
$ hg config
- defaults.backout=-d "0 0"
- defaults.commit=-d "0 0"
- defaults.shelve=--date "0 0"
- defaults.tag=-d "0 0"
devel.all-warnings=true
+ devel.default-date=0 0
largefiles.usercache=$TESTTMP/.cache/largefiles (glob)
ui.slash=True
ui.interactive=False
@@ -63,7 +60,7 @@
> EOF
$ hg up null
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
- $ python ./update_to_rev0.py
+ $ $PYTHON ./update_to_rev0.py
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg identify -n
0
--- a/tests/test-bdiff.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-bdiff.py Tue Jun 20 16:33:46 2017 -0400
@@ -4,8 +4,7 @@
import unittest
from mercurial import (
- bdiff,
- mpatch,
+ mdiff,
)
class diffreplace(
@@ -16,10 +15,10 @@
class BdiffTests(unittest.TestCase):
def assert_bdiff_applies(self, a, b):
- d = bdiff.bdiff(a, b)
+ d = mdiff.textdiff(a, b)
c = a
if d:
- c = mpatch.patches(a, [d])
+ c = mdiff.patches(a, [d])
self.assertEqual(
c, b, ("bad diff+patch result from\n %r to\n "
"%r: \nbdiff: %r\npatched: %r" % (a, b, d, c[:200])))
@@ -54,7 +53,7 @@
self.assert_bdiff(a, b)
def showdiff(self, a, b):
- bin = bdiff.bdiff(a, b)
+ bin = mdiff.textdiff(a, b)
pos = 0
q = 0
actions = []
@@ -110,7 +109,7 @@
("", "", 0),
]
for a, b, allws in cases:
- c = bdiff.fixws(a, allws)
+ c = mdiff.fixws(a, allws)
self.assertEqual(
c, b, 'fixws(%r) want %r got %r (allws=%r)' % (a, b, c, allws))
--- a/tests/test-bisect.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-bisect.t Tue Jun 20 16:33:46 2017 -0400
@@ -453,7 +453,7 @@
test bisecting command
$ cat > script.py <<EOF
- > #!/usr/bin/env python
+ > #!$PYTHON
> import sys
> from mercurial import ui, hg
> repo = hg.repository(ui.ui.load(), '.')
@@ -463,12 +463,12 @@
$ chmod +x script.py
$ hg bisect -r
$ hg up -qr tip
- $ hg bisect --command "python \"$TESTTMP/script.py\" and some parameters"
+ $ hg bisect --command "$PYTHON \"$TESTTMP/script.py\" and some parameters"
changeset 31:58c80a7c8a40: good
abort: cannot bisect (no known bad revisions)
[255]
$ hg up -qr 0
- $ hg bisect --command "python \"$TESTTMP/script.py\" and some parameters"
+ $ hg bisect --command "$PYTHON \"$TESTTMP/script.py\" and some parameters"
changeset 0:b99c7b9c8e11: bad
changeset 15:e7fa0811edb0: good
changeset 7:03750880c6b5: good
@@ -551,7 +551,14 @@
date: Thu Jan 01 00:00:06 1970 +0000
summary: msg 6
-
+ $ hg graft -q 15
+ warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+ abort: unresolved conflicts, can't continue
+ (use 'hg resolve' and 'hg graft --continue')
+ [255]
+ $ hg bisect --reset
+ $ hg up -C .
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
Check that bisect does not break on obsolete changesets
=========================================================
@@ -604,3 +611,51 @@
date: Thu Jan 01 00:00:26 1970 +0000
summary: msg 26
+Test the validation message when exclusive options are used:
+
+ $ hg bisect -r
+ $ hg bisect -b -c false
+ abort: --bad and --command are incompatible
+ [255]
+ $ hg bisect -b -e
+ abort: --bad and --extend are incompatible
+ [255]
+ $ hg bisect -b -g
+ abort: --bad and --good are incompatible
+ [255]
+ $ hg bisect -b -r
+ abort: --bad and --reset are incompatible
+ [255]
+ $ hg bisect -b -s
+ abort: --bad and --skip are incompatible
+ [255]
+ $ hg bisect -c false -e
+ abort: --command and --extend are incompatible
+ [255]
+ $ hg bisect -c false -g
+ abort: --command and --good are incompatible
+ [255]
+ $ hg bisect -c false -r
+ abort: --command and --reset are incompatible
+ [255]
+ $ hg bisect -c false -s
+ abort: --command and --skip are incompatible
+ [255]
+ $ hg bisect -e -g
+ abort: --extend and --good are incompatible
+ [255]
+ $ hg bisect -e -r
+ abort: --extend and --reset are incompatible
+ [255]
+ $ hg bisect -e -s
+ abort: --extend and --skip are incompatible
+ [255]
+ $ hg bisect -g -r
+ abort: --good and --reset are incompatible
+ [255]
+ $ hg bisect -g -s
+ abort: --good and --skip are incompatible
+ [255]
+ $ hg bisect -r -s
+ abort: --reset and --skip are incompatible
+ [255]
--- a/tests/test-bisect2.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-bisect2.t Tue Jun 20 16:33:46 2017 -0400
@@ -244,6 +244,7 @@
$ hg up -C
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "d42e18c7bc9b: 18"
3 other heads for branch "default"
complex bisect test 1 # first bad rev is 9
--- a/tests/test-blackbox.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-blackbox.t Tue Jun 20 16:33:46 2017 -0400
@@ -137,6 +137,7 @@
$ hg update
hooked
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "d02f48003e62: c"
1 other heads for branch "default"
$ hg blackbox -l 6
1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> update
@@ -171,7 +172,7 @@
$ sed -e 's/\(.*test1.*\)/#\1/; s#\(.*commit2.*\)#os.rmdir(".hg/blackbox.log")\
> os.rename(".hg/blackbox.log-", ".hg/blackbox.log")\
> \1#' $TESTDIR/test-dispatch.py > ../test-dispatch.py
- $ python $TESTDIR/blackbox-readonly-dispatch.py
+ $ $PYTHON $TESTDIR/blackbox-readonly-dispatch.py
running: add foo
result: 0
running: commit -m commit1 -d 2000-01-01 foo
@@ -195,8 +196,8 @@
result: None
$ hg blackbox
1970/01/01 00:00:00 bob @0e46349438790c460c5c9f7546bfcd39b267bbd2 (5000)> commit -m commit2 -d 2000-01-02 foo
- 1970/01/01 00:00:00 bob @0e46349438790c460c5c9f7546bfcd39b267bbd2 (5000)> updated served branch cache in * seconds (glob)
- 1970/01/01 00:00:00 bob @0e46349438790c460c5c9f7546bfcd39b267bbd2 (5000)> wrote served branch cache with 1 labels and 1 nodes
+ 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> updated served branch cache in * seconds (glob)
+ 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> wrote served branch cache with 1 labels and 1 nodes
1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> commit -m commit2 -d 2000-01-02 foo exited 0 after * seconds (glob)
1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> log -r 0
1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> writing .hg/cache/tags2-visible with 0 tags
--- a/tests/test-bookmarks-pushpull.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-bookmarks-pushpull.t Tue Jun 20 16:33:46 2017 -0400
@@ -203,7 +203,7 @@
(test that too many divergence of bookmark)
- $ python $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -r 000000000000 "X@${i}"; done
+ $ $PYTHON $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -r 000000000000 "X@${i}"; done
$ hg pull ../a
pulling from ../a
searching for changes
@@ -231,7 +231,7 @@
@1 2:0d2164f0ce0d
@foo 2:0d2164f0ce0d
- $ python $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -d "X@${i}"; done
+ $ $PYTHON $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -d "X@${i}"; done
$ hg bookmarks -d "@1"
$ hg push -f ../a
--- a/tests/test-bookmarks.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-bookmarks.t Tue Jun 20 16:33:46 2017 -0400
@@ -311,8 +311,45 @@
abort: cannot use an integer as a name
[255]
+bookmark with a name that matches a node id
+ $ hg bookmark 925d80f479bb db815d6d32e6
+ bookmark 925d80f479bb matches a changeset hash
+ (did you leave a -r out of an 'hg bookmark' command?)
+ bookmark db815d6d32e6 matches a changeset hash
+ (did you leave a -r out of an 'hg bookmark' command?)
+ $ hg bookmark -d 925d80f479bb
+ $ hg bookmark -d db815d6d32e6
+
+ $ cd ..
+
+bookmark with a name that matches an ambiguous node id
+
+ $ hg init ambiguous
+ $ cd ambiguous
+ $ echo 0 > a
+ $ hg ci -qAm 0
+ $ for i in 1057 2857 4025; do
+ > hg up -q 0
+ > echo $i > a
+ > hg ci -qm $i
+ > done
+ $ hg up -q null
+ $ hg log -r0: -T '{rev}:{node}\n'
+ 0:b4e73ffab476aa0ee32ed81ca51e07169844bc6a
+ 1:c56256a09cd28e5764f32e8e2810d0f01e2e357a
+ 2:c5623987d205cd6d9d8389bfc40fff9dbb670b48
+ 3:c562ddd9c94164376c20b86b0b4991636a3bf84f
+
+ $ hg bookmark -r0 c562
+ $ hg bookmarks
+ c562 0:b4e73ffab476
+
+ $ cd ..
+
incompatible options
+ $ cd repo
+
$ hg bookmark -m Y -d Z
abort: --delete and --rename are incompatible
[255]
@@ -663,7 +700,7 @@
test missing revisions
- $ echo "925d80f479bc z" > .hg/bookmarks
+ $ echo "925d80f479b925d80f479bc925d80f479bccabab z" > .hg/bookmarks
$ hg book
no bookmarks set
--- a/tests/test-bundle.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-bundle.t Tue Jun 20 16:33:46 2017 -0400
@@ -302,6 +302,20 @@
$ hg debugbundle --spec packednongd.hg
none-packed1;requirements%3Drevlogv1
+Warning emitted when packed bundles contain secret changesets
+
+ $ hg init testsecret
+ $ cd testsecret
+ $ touch foo
+ $ hg -q commit -A -m initial
+ $ hg phase --force --secret -r .
+ $ cd ..
+
+ $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
+ (warning: stream clone bundle will contain secret revisions)
+ writing 301 bytes for 3 files
+ bundle requirements: generaldelta, revlogv1
+
Unpacking packed1 bundles with "hg unbundle" isn't allowed
$ hg init packed
--- a/tests/test-bundle2-format.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-bundle2-format.t Tue Jun 20 16:33:46 2017 -0400
@@ -14,7 +14,6 @@
> """
>
> import sys, os, gc
- > from mercurial import cmdutil
> from mercurial import util
> from mercurial import bundle2
> from mercurial import scmutil
@@ -22,6 +21,7 @@
> from mercurial import changegroup
> from mercurial import error
> from mercurial import obsolete
+ > from mercurial import registrar
>
>
> try:
@@ -33,7 +33,7 @@
> pass
>
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
>
> ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
> Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
@@ -113,7 +113,7 @@
> headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
> headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
> outgoing = discovery.outgoing(repo, headcommon, headmissing)
- > cg = changegroup.getlocalchangegroup(repo, 'test:bundle2', outgoing, None)
+ > cg = changegroup.getchangegroup(repo, 'test:bundle2', outgoing, None)
> bundler.newpart('changegroup', data=cg.getchunks(),
> mandatory=False)
>
--- a/tests/test-bundle2-remote-changegroup.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-bundle2-remote-changegroup.t Tue Jun 20 16:33:46 2017 -0400
@@ -74,7 +74,7 @@
Start a simple HTTP server to serve bundles
- $ python "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid
+ $ $PYTHON "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid
$ cat dumb.pid >> $DAEMON_PIDS
$ cat >> $HGRCPATH << EOF
--- a/tests/test-cache-abuse.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-cache-abuse.t Tue Jun 20 16:33:46 2017 -0400
@@ -70,10 +70,6 @@
$ damage tags tags2-visible
$ damage "tag -f t3" hgtagsfnodes1
-Beat up hidden cache:
-
- $ damage log hidden
-
Beat up branch caches:
$ damage branches branch2-base "rm .hg/cache/branch2-[vs]*"
--- a/tests/test-casefolding.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-casefolding.t Tue Jun 20 16:33:46 2017 -0400
@@ -9,7 +9,6 @@
$ cd repo1
$ echo a > a
$ hg add A
- adding a
$ hg st
A a
$ hg ci -m adda
@@ -28,6 +27,7 @@
a
committing manifest
committing changelog
+ updating the branch cache
committed changeset 0:07f4944404050f47db2e5c5071e0e84e7a27bba9
Case-changing renames should work:
@@ -70,14 +70,12 @@
A D/c
$ hg ci -m addc D/c
$ hg mv d/b d/e
- moving D/b to D/e (glob)
$ hg st
A D/e
R D/b
$ hg revert -aq
$ rm d/e
$ hg mv d/b D/B
- moving D/b to D/B (glob)
$ hg st
A D/B
R D/b
--- a/tests/test-cat.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-cat.t Tue Jun 20 16:33:46 2017 -0400
@@ -63,6 +63,46 @@
tmp/h_45116003780e
tmp/r_2
+Test template output
+
+ $ hg --cwd tmp cat ../b ../c -T '== {path} ({abspath}) ==\n{data}'
+ == ../b (b) == (glob)
+ 1
+ == ../c (c) == (glob)
+ 3
+
+ $ hg cat b c -Tjson --output -
+ [
+ {
+ "abspath": "b",
+ "data": "1\n",
+ "path": "b"
+ },
+ {
+ "abspath": "c",
+ "data": "3\n",
+ "path": "c"
+ }
+ ]
+
+ $ hg cat b c -Tjson --output 'tmp/%p.json'
+ $ cat tmp/b.json
+ [
+ {
+ "abspath": "b",
+ "data": "1\n",
+ "path": "b"
+ }
+ ]
+ $ cat tmp/c.json
+ [
+ {
+ "abspath": "c",
+ "data": "3\n",
+ "path": "c"
+ }
+ ]
+
Test working directory
$ echo b-wdir > b
--- a/tests/test-censor.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-censor.t Tue Jun 20 16:33:46 2017 -0400
@@ -304,7 +304,7 @@
Can censor after revlog has expanded to no longer permit inline storage
- $ for x in `python $TESTDIR/seq.py 0 50000`
+ $ for x in `$PYTHON $TESTDIR/seq.py 0 50000`
> do
> echo "Password: hunter$x" >> target
> done
--- a/tests/test-check-code.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-check-code.t Tue Jun 20 16:33:46 2017 -0400
@@ -9,39 +9,11 @@
$ hg locate -X contrib/python-zstandard -X hgext/fsmonitor/pywatchman |
> sed 's-\\-/-g' | "$check_code" --warnings --per-file=0 - || false
- contrib/perf.py:869:
- > r.revision(r.node(x))
- don't convert rev to node before passing to revision(nodeorrev)
Skipping i18n/polib.py it has no-che?k-code (glob)
- mercurial/demandimport.py:313:
- > if os.environ.get('HGDEMANDIMPORT') != 'disable':
- use encoding.environ instead (py3)
- mercurial/encoding.py:54:
- > environ = os.environ
- use encoding.environ instead (py3)
- mercurial/encoding.py:56:
- > environ = os.environb
- use encoding.environ instead (py3)
- mercurial/encoding.py:61:
- > for k, v in os.environ.items())
- use encoding.environ instead (py3)
- mercurial/encoding.py:221:
- > for k, v in os.environ.items())
- use encoding.environ instead (py3)
Skipping mercurial/httpclient/__init__.py it has no-che?k-code (glob)
Skipping mercurial/httpclient/_readers.py it has no-che?k-code (glob)
- mercurial/policy.py:46:
- > if 'HGMODULEPOLICY' in os.environ:
- use encoding.environ instead (py3)
- mercurial/policy.py:47:
- > policy = os.environ['HGMODULEPOLICY'].encode('utf-8')
- use encoding.environ instead (py3)
- mercurial/policy.py:49:
- > policy = os.environ.get('HGMODULEPOLICY', policy)
- use encoding.environ instead (py3)
Skipping mercurial/statprof.py it has no-che?k-code (glob)
Skipping tests/badserverext.py it has no-che?k-code (glob)
- [1]
@commands in debugcommands.py should be in alphabetical order.
--- a/tests/test-check-commit.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-check-commit.t Tue Jun 20 16:33:46 2017 -0400
@@ -9,7 +9,7 @@
$ cd $TESTDIR/..
$ for node in `hg log --rev 'not public() and ::. and not desc("# no-check-commit")' --template '{node|short}\n'`; do
- > hg export $node | contrib/check-commit > ${TESTTMP}/check-commit.out
+ > hg export --git $node | contrib/check-commit > ${TESTTMP}/check-commit.out
> if [ $? -ne 0 ]; then
> echo "Revision $node does not comply with rules"
> echo '------------------------------------------------------'
--- a/tests/test-check-config.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-check-config.t Tue Jun 20 16:33:46 2017 -0400
@@ -1,9 +1,38 @@
#require test-repo
$ . "$TESTDIR/helpers-testrepo.sh"
+
+Sanity check check-config.py
+
+ $ cat > testfile.py << EOF
+ > # Good
+ > foo = ui.config('ui', 'username')
+ > # Missing
+ > foo = ui.config('ui', 'doesnotexist')
+ > # Missing different type
+ > foo = ui.configint('ui', 'missingint')
+ > # Missing with default value
+ > foo = ui.configbool('ui', 'missingbool1', default=True)
+ > foo = ui.configbool('ui', 'missingbool2', False)
+ > EOF
+
+ $ cat > files << EOF
+ > mercurial/help/config.txt
+ > $TESTTMP/testfile.py
+ > EOF
+
$ cd "$TESTDIR"/..
+ $ $PYTHON contrib/check-config.py < $TESTTMP/files
+ undocumented: ui.doesnotexist (str)
+ undocumented: ui.missingbool1 (bool) [True]
+ undocumented: ui.missingbool2 (bool)
+ undocumented: ui.missingint (int)
+
New errors are not allowed. Warnings are strongly discouraged.
$ hg files "set:(**.py or **.txt) - tests/**" | sed 's|\\|/|g' |
- > python contrib/check-config.py
+ > $PYTHON contrib/check-config.py
+ limit = ui.configwith(fraction, 'profiling', 'showmin', 0.05)
+
+ conflict on profiling.showmin: ('with', '0.05') != ('with', '0.005')
--- a/tests/test-check-help.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-check-help.t Tue Jun 20 16:33:46 2017 -0400
@@ -23,6 +23,6 @@
Check if ":hg:`help TOPIC`" is valid:
(use "xargs -n1 -t" to see which help commands are executed)
- $ hg files 'glob:{hgext,mercurial}/**/*.py' | sed 's|\\|/|g' \
- > | xargs python "$TESTTMP/scanhelptopics.py" \
+ $ hg files 'glob:{hgdemandimport,hgext,mercurial}/**/*.py' | sed 's|\\|/|g' \
+ > | xargs $PYTHON "$TESTTMP/scanhelptopics.py" \
> | xargs -n1 hg help > /dev/null
--- a/tests/test-check-py3-commands.t Tue Jun 13 22:24:41 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,161 +0,0 @@
-#require py3exe
-
-This test helps in keeping a track on which commands we can run on
-Python 3 and see what kind of errors are coming up.
-The full traceback is hidden to have a stable output.
- $ HGBIN=`which hg`
-
- $ for cmd in version debuginstall ; do
- > echo $cmd
- > $PYTHON3 $HGBIN $cmd 2>&1 2>&1 | tail -1
- > done
- version
- warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
- debuginstall
- no problems detected
-
-#if test-repo
-Make a clone so that any features in the developer's .hg/hgrc that
-might confuse Python 3 don't break this test. When we can do commit in
-Python 3, we'll stop doing this. We use e76ed1e480ef for the clone
-because it has different files than 273ce12ad8f1, so we can test both
-`files` from dirstate and `files` loaded from a specific revision.
-
- $ hg clone -r e76ed1e480ef "`dirname "$TESTDIR"`" testrepo 2>&1 | tail -1
- 15 files updated, 0 files merged, 0 files removed, 0 files unresolved
-
-Test using -R, which exercises some URL code:
- $ $PYTHON3 $HGBIN -R testrepo files -r 273ce12ad8f1 | tail -1
- testrepo/tkmerge
-
-Now prove `hg files` is reading the whole manifest. We have to grep
-out some potential warnings that come from hgrc as yet.
- $ cd testrepo
- $ $PYTHON3 $HGBIN files -r 273ce12ad8f1
- .hgignore
- PKG-INFO
- README
- hg
- mercurial/__init__.py
- mercurial/byterange.py
- mercurial/fancyopts.py
- mercurial/hg.py
- mercurial/mdiff.py
- mercurial/revlog.py
- mercurial/transaction.py
- notes.txt
- setup.py
- tkmerge
-
- $ $PYTHON3 $HGBIN files -r 273ce12ad8f1 | wc -l
- \s*14 (re)
- $ $PYTHON3 $HGBIN files | wc -l
- \s*15 (re)
-
-Test if log-like commands work:
-
- $ $PYTHON3 $HGBIN tip
- changeset: 10:e76ed1e480ef
- tag: tip
- user: oxymoron@cinder.waste.org
- date: Tue May 03 23:37:43 2005 -0800
- summary: Fix linking of changeset revs when merging
-
-
- $ $PYTHON3 $HGBIN log -r0
- changeset: 0:9117c6561b0b
- user: mpm@selenic.com
- date: Tue May 03 13:16:10 2005 -0800
- summary: Add back links from file revisions to changeset revisions
-
-
- $ cd ..
-#endif
-
-Test if `hg config` works:
-
- $ $PYTHON3 $HGBIN config
- defaults.backout=-d "0 0"
- defaults.commit=-d "0 0"
- defaults.shelve=--date "0 0"
- defaults.tag=-d "0 0"
- devel.all-warnings=true
- largefiles.usercache=$TESTTMP/.cache/largefiles
- ui.slash=True
- ui.interactive=False
- ui.mergemarkers=detailed
- ui.promptecho=True
- web.address=localhost
- web.ipv6=False
-
- $ cat > included-hgrc <<EOF
- > [extensions]
- > babar = imaginary_elephant
- > EOF
- $ cat >> $HGRCPATH <<EOF
- > %include $TESTTMP/included-hgrc
- > EOF
- $ $PYTHON3 $HGBIN version | tail -1
- *** failed to import extension babar from imaginary_elephant: *: 'imaginary_elephant' (glob)
- warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
- $ rm included-hgrc
- $ touch included-hgrc
-
-Test bytes-ness of policy.policy with HGMODULEPOLICY
-
- $ HGMODULEPOLICY=py
- $ export HGMODULEPOLICY
- $ $PYTHON3 `which hg` debuginstall 2>&1 2>&1 | tail -1
- no problems detected
-
-`hg init` can create empty repos
-`hg status works fine`
-`hg summary` also works!
-
- $ $PYTHON3 `which hg` init py3repo
- $ cd py3repo
- $ echo "This is the file 'iota'." > iota
- $ $PYTHON3 $HGBIN status
- ? iota
- $ $PYTHON3 $HGBIN add iota
- $ $PYTHON3 $HGBIN status
- A iota
- $ $PYTHON3 $HGBIN commit --message 'commit performed in Python 3'
- $ $PYTHON3 $HGBIN status
-
- $ mkdir A
- $ echo "This is the file 'mu'." > A/mu
- $ $PYTHON3 $HGBIN addremove
- adding A/mu
- $ $PYTHON3 $HGBIN status
- A A/mu
- $ HGEDITOR='echo message > ' $PYTHON3 $HGBIN commit
- $ $PYTHON3 $HGBIN status
- $ $PYHON3 $HGBIN summary
- parent: 1:e1e9167203d4 tip
- message
- branch: default
- commit: (clean)
- update: (current)
- phases: 2 draft
-
-Prove the repo is valid using the Python 2 `hg`:
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- 2 files, 2 changesets, 2 total revisions
- $ hg log
- changeset: 1:e1e9167203d4
- tag: tip
- user: test
- date: Thu Jan 01 00:00:00 1970 +0000
- summary: message
-
- changeset: 0:71c96e924262
- user: test
- date: Thu Jan 01 00:00:00 1970 +0000
- summary: commit performed in Python 3
-
--- a/tests/test-check-py3-compat.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-check-py3-compat.t Tue Jun 20 16:33:46 2017 -0400
@@ -3,7 +3,7 @@
$ . "$TESTDIR/helpers-testrepo.sh"
$ cd "$TESTDIR"/..
- $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs python contrib/check-py3-compat.py
+ $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs $PYTHON contrib/check-py3-compat.py
contrib/python-zstandard/setup.py not using absolute_import
contrib/python-zstandard/setup_zstd.py not using absolute_import
contrib/python-zstandard/tests/common.py not using absolute_import
@@ -26,11 +26,11 @@
> | sed 's|\\|/|g' | xargs $PYTHON3 contrib/check-py3-compat.py \
> | sed 's/[0-9][0-9]*)$/*)/'
hgext/convert/transport.py: error importing: <*Error> No module named 'svn.client' (error at transport.py:*) (glob)
- hgext/fsmonitor/state.py: error importing: <SyntaxError> from __future__ imports must occur at the beginning of the file (__init__.py, line 30) (error at __init__.py:*)
- hgext/fsmonitor/watchmanclient.py: error importing: <SyntaxError> from __future__ imports must occur at the beginning of the file (__init__.py, line 30) (error at __init__.py:*)
- mercurial/cffi/bdiff.py: error importing: <*Error> No module named 'mercurial.cffi' (error at check-py3-compat.py:*) (glob)
- mercurial/cffi/mpatch.py: error importing: <*Error> No module named 'mercurial.cffi' (error at check-py3-compat.py:*) (glob)
- mercurial/cffi/osutil.py: error importing: <*Error> No module named 'mercurial.cffi' (error at check-py3-compat.py:*) (glob)
+ mercurial/cffi/bdiff.py: error importing: <ImportError> cannot import name '_bdiff' (error at bdiff.py:*)
+ mercurial/cffi/bdiffbuild.py: error importing: <ImportError> No module named 'cffi' (error at bdiffbuild.py:*)
+ mercurial/cffi/mpatch.py: error importing: <ImportError> cannot import name '_mpatch' (error at mpatch.py:*)
+ mercurial/cffi/mpatchbuild.py: error importing: <ImportError> No module named 'cffi' (error at mpatchbuild.py:*)
+ mercurial/cffi/osutilbuild.py: error importing: <ImportError> No module named 'cffi' (error at osutilbuild.py:*)
mercurial/scmwindows.py: error importing: <*Error> No module named 'msvcrt' (error at win32.py:*) (glob)
mercurial/win32.py: error importing: <*Error> No module named 'msvcrt' (error at win32.py:*) (glob)
mercurial/windows.py: error importing: <*Error> No module named 'msvcrt' (error at windows.py:*) (glob)
--- a/tests/test-check-pyflakes.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-check-pyflakes.t Tue Jun 20 16:33:46 2017 -0400
@@ -10,5 +10,5 @@
> -X mercurial/pycompat.py -X contrib/python-zstandard \
> 2>/dev/null \
> | xargs pyflakes 2>/dev/null | "$TESTDIR/filterpyflakes.py"
- tests/filterpyflakes.py:39: undefined name 'undefinedname'
+ tests/filterpyflakes.py:41: undefined name 'undefinedname'
--- a/tests/test-check-pylint.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-check-pylint.t Tue Jun 20 16:33:46 2017 -0400
@@ -12,7 +12,7 @@
$ touch $TESTTMP/fakerc
$ pylint --rcfile=$TESTTMP/fakerc --disable=all \
> --enable=W0102 --reports=no \
- > mercurial hgext hgext3rd
+ > mercurial hgdemandimport hgext hgext3rd
(?)
------------------------------------ (?)
Your code has been rated at 10.00/10 (?)
--- a/tests/test-check-shbang.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-check-shbang.t Tue Jun 20 16:33:46 2017 -0400
@@ -5,9 +5,20 @@
look for python scripts that do not use /usr/bin/env
- $ hg files 'set:grep(r"^#!.*?python") and not grep(r"^#!/usr/bi{1}n/env python")'
+ $ hg files 'set:grep(r"^#!.*?python") and not grep(r"^#!/usr/bi{1}n/env python") - **/*.t'
[1]
+In tests, enforce $PYTHON and *not* /usr/bin/env python or similar:
+ $ hg files 'set:grep(r"#!.*?python") and **/*.t' \
+ > -X tests/test-check-execute.t \
+ > -X tests/test-check-module-imports.t \
+ > -X tests/test-check-pyflakes.t \
+ > -X tests/test-check-shbang.t
+ [1]
+
+The above exclusions are because they're looking for files that
+contain Python but don't end in .py - please avoid adding more.
+
look for shell scripts that do not use /bin/sh
$ hg files 'set:grep(r"^#!.*/bi{1}n/sh") and not grep(r"^#!/bi{1}n/sh")'
--- a/tests/test-chg.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-chg.t Tue Jun 20 16:33:46 2017 -0400
@@ -89,7 +89,7 @@
> [extensions]
> pager =
> [pager]
- > pager = python $TESTTMP/fakepager.py
+ > pager = $PYTHON $TESTTMP/fakepager.py
> EOF
$ chg version > /dev/null
$ touch foo
@@ -105,9 +105,9 @@
chg waits for pager if runcommand raises
$ cat > $TESTTMP/crash.py <<EOF
- > from mercurial import cmdutil
+ > from mercurial import registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> @command('crash')
> def pagercrash(ui, repo, *pats, **opts):
> ui.write('going to crash\n')
--- a/tests/test-clone-cgi.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-clone-cgi.t Tue Jun 20 16:33:46 2017 -0400
@@ -27,7 +27,7 @@
$ . "$TESTDIR/cgienv"
$ QUERY_STRING="cmd=changegroup&roots=0000000000000000000000000000000000000000"; export QUERY_STRING
$ python hgweb.cgi >page1 2>&1
- $ python "$TESTDIR/md5sum.py" page1
+ $ $PYTHON "$TESTDIR/md5sum.py" page1
1f424bb22ec05c3c6bc866b6e67efe43 page1
make sure headers are sent even when there is no body
--- a/tests/test-clone-uncompressed.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-clone-uncompressed.t Tue Jun 20 16:33:46 2017 -0400
@@ -49,6 +49,77 @@
bundle2-input-bundle: 1 parts total
checking for updated bookmarks
+Cannot stream clone when there are secret changesets
+
+ $ hg -R server phase --force --secret -r tip
+ $ hg clone --uncompressed -U http://localhost:$HGPORT secret-denied
+ warning: stream clone requested but server has them disabled
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+
+ $ killdaemons.py
+
+Streaming of secrets can be overridden by server config
+
+ $ cd server
+ $ hg --config server.uncompressedallowsecret=true serve -p $HGPORT -d --pid-file=hg.pid
+ $ cat hg.pid > $DAEMON_PIDS
+ $ cd ..
+
+ $ hg clone --uncompressed -U http://localhost:$HGPORT secret-allowed
+ streaming all changes
+ 1027 files to transfer, 96.3 KB of data
+ transferred 96.3 KB in * seconds (*/sec) (glob)
+ searching for changes
+ no changes found
+
+ $ killdaemons.py
+
+Verify interaction between preferuncompressed and secret presence
+
+ $ cd server
+ $ hg --config server.preferuncompressed=true serve -p $HGPORT -d --pid-file=hg.pid
+ $ cat hg.pid > $DAEMON_PIDS
+ $ cd ..
+
+ $ hg clone -U http://localhost:$HGPORT preferuncompressed-secret
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+
+ $ killdaemons.py
+
+Clone not allowed when full bundles disabled and can't serve secrets
+
+ $ cd server
+ $ hg --config server.disablefullbundle=true serve -p $HGPORT -d --pid-file=hg.pid
+ $ cat hg.pid > $DAEMON_PIDS
+ $ cd ..
+
+ $ hg clone --uncompressed http://localhost:$HGPORT secret-full-disabled
+ warning: stream clone requested but server has them disabled
+ requesting all changes
+ remote: abort: server has pull-based clones disabled
+ abort: pull failed on remote
+ (remove --pull if specified or upgrade Mercurial)
+ [255]
+
+Local stream clone with secrets involved
+(This is just a test over behavior: if you have access to the repo's files,
+there is no security so it isn't important to prevent a clone here.)
+
+ $ hg clone -U --uncompressed server local-secret
+ warning: stream clone requested but server has them disabled
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
Stream clone while repo is changing:
--- a/tests/test-clone.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-clone.t Tue Jun 20 16:33:46 2017 -0400
@@ -52,6 +52,8 @@
checkisexec (execbit !)
checklink (symlink !)
checklink-target (symlink !)
+ rbc-names-v1
+ rbc-revs-v1
$ cat a
a
@@ -99,6 +101,8 @@
$ ls .hg/cache
branch2-served
+ rbc-names-v1
+ rbc-revs-v1
$ cat a 2>/dev/null || echo "a not present"
a not present
@@ -520,7 +524,7 @@
> hg.clone(myui, {}, repo, dest="ua")
> EOF
- $ python simpleclone.py
+ $ $PYTHON simpleclone.py
updating to branch default
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -534,7 +538,7 @@
> hg.clone(myui, {}, repo, dest="ua", branch=["stable",])
> EOF
- $ python branchclone.py
+ $ $PYTHON branchclone.py
adding changesets
adding manifests
adding file changes
--- a/tests/test-clonebundles.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-clonebundles.t Tue Jun 20 16:33:46 2017 -0400
@@ -51,7 +51,8 @@
$ echo 'http://does.not.exist/bundle.hg' > server/.hg/clonebundles.manifest
$ hg clone http://localhost:$HGPORT 404-url
applying clone bundle from http://does.not.exist/bundle.hg
- error fetching bundle: (.* not known|getaddrinfo failed|No address associated with hostname) (re)
+ error fetching bundle: (.* not known|No address associated with hostname) (re) (no-windows !)
+ error fetching bundle: [Errno 11004] getaddrinfo failed (windows !)
abort: error applying bundle
(if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
[255]
@@ -68,7 +69,7 @@
Server returns 404
- $ python $TESTDIR/dumbhttp.py -p $HGPORT1 --pid http.pid
+ $ $PYTHON $TESTDIR/dumbhttp.py -p $HGPORT1 --pid http.pid
$ cat http.pid >> $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT running-404
applying clone bundle from http://localhost:$HGPORT1/bundle.hg
--- a/tests/test-command-template.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-command-template.t Tue Jun 20 16:33:46 2017 -0400
@@ -209,14 +209,29 @@
Add some simple styles to settings
- $ echo '[templates]' >> .hg/hgrc
- $ printf 'simple = "{rev}\\n"\n' >> .hg/hgrc
- $ printf 'simple2 = {rev}\\n\n' >> .hg/hgrc
+ $ cat <<'EOF' >> .hg/hgrc
+ > [templates]
+ > simple = "{rev}\n"
+ > simple2 = {rev}\n
+ > rev = "should not precede {rev} keyword\n"
+ > EOF
$ hg log -l1 -Tsimple
8
$ hg log -l1 -Tsimple2
8
+ $ hg log -l1 -Trev
+ should not precede 8 keyword
+ $ hg log -l1 -T '{simple}'
+ 8
+
+Map file shouldn't see user templates:
+
+ $ cat <<EOF > tmpl
+ > changeset = 'nothing expanded:{simple}\n'
+ > EOF
+ $ hg log -l1 --style ./tmpl
+ nothing expanded:
Test templates and style maps in files:
@@ -1180,7 +1195,10 @@
common mistake:
- $ hg log -T '{changeset}\n'
+ $ cat << EOF > issue4758
+ > changeset = '{changeset}\n'
+ > EOF
+ $ hg log --style ./issue4758
abort: recursive reference 'changeset' in template
[255]
@@ -1196,7 +1214,10 @@
buildmap() -> gettemplate(), where no thunk was made:
- $ hg log -T '{files % changeset}\n'
+ $ cat << EOF > issue4758
+ > changeset = '{files % changeset}\n'
+ > EOF
+ $ hg log --style ./issue4758
abort: recursive reference 'changeset' in template
[255]
@@ -3503,6 +3524,9 @@
hg: parse error: shortest() expects an integer minlength
[255]
+ $ hg log -r 'wdir()' -T '{node|shortest}\n'
+ ffff
+
$ cd ..
Test shortest(node) with the repo having short hash collision:
--- a/tests/test-commandserver.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-commandserver.t Tue Jun 20 16:33:46 2017 -0400
@@ -187,11 +187,8 @@
... runcommand(server, ['-R', 'foo', 'showconfig', 'ui', 'defaults'])
*** runcommand showconfig
bundle.mainreporoot=$TESTTMP/repo
- defaults.backout=-d "0 0"
- defaults.commit=-d "0 0"
- defaults.shelve=--date "0 0"
- defaults.tag=-d "0 0"
devel.all-warnings=true
+ devel.default-date=0 0
largefiles.usercache=$TESTTMP/.cache/largefiles
ui.slash=True
ui.interactive=False
@@ -203,10 +200,6 @@
web\.ipv6=(?:True|False) (re)
*** runcommand init foo
*** runcommand -R foo showconfig ui defaults
- defaults.backout=-d "0 0"
- defaults.commit=-d "0 0"
- defaults.shelve=--date "0 0"
- defaults.tag=-d "0 0"
ui.slash=True
ui.interactive=False
ui.mergemarkers=detailed
@@ -579,9 +572,9 @@
$ cat <<EOF > dbgui.py
> import os, sys
- > from mercurial import cmdutil, commands
+ > from mercurial import commands, registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> @command("debuggetpass", norepo=True)
> def debuggetpass(ui):
> ui.write("%s\\n" % ui.getpass())
--- a/tests/test-commit-interactive-curses.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-commit-interactive-curses.t Tue Jun 20 16:33:46 2017 -0400
@@ -343,7 +343,7 @@
$ cp $HGRCPATH.pretest $HGRCPATH
$ chunkselectorinterface() {
> python <<EOF
- > from mercurial import hg, ui, parsers;\
+ > from mercurial import hg, ui;\
> repo = hg.repository(ui.ui.load(), ".");\
> print repo.ui.interface("chunkselector")
> EOF
--- a/tests/test-commit-interactive.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-commit-interactive.t Tue Jun 20 16:33:46 2017 -0400
@@ -909,7 +909,7 @@
> sys.stdout.write(''.join(escape(c) for c in l))
> EOF
- $ hg commit -i --encoding cp932 2>&1 <<EOF | python $TESTTMP/escape.py | grep '^y - '
+ $ hg commit -i --encoding cp932 2>&1 <<EOF | $PYTHON $TESTTMP/escape.py | grep '^y - '
> ?
> q
> EOF
--- a/tests/test-commit.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-commit.t Tue Jun 20 16:33:46 2017 -0400
@@ -15,20 +15,20 @@
$ hg commit -d '0 0' -m commit-1
$ echo foo >> foo
$ hg commit -d '1 4444444' -m commit-3
- abort: impossible time zone offset: 4444444
+ hg: parse error: impossible time zone offset: 4444444
[255]
$ hg commit -d '1 15.1' -m commit-4
- abort: invalid date: '1\t15.1'
+ hg: parse error: invalid date: '1\t15.1'
[255]
$ hg commit -d 'foo bar' -m commit-5
- abort: invalid date: 'foo bar'
+ hg: parse error: invalid date: 'foo bar'
[255]
$ hg commit -d ' 1 4444' -m commit-6
$ hg commit -d '111111111111 0' -m commit-7
- abort: date exceeds 32 bits: 111111111111
+ hg: parse error: date exceeds 32 bits: 111111111111
[255]
$ hg commit -d '-111111111111 0' -m commit-7
- abort: date exceeds 32 bits: -111111111111
+ hg: parse error: date exceeds 32 bits: -111111111111
[255]
$ echo foo >> foo
$ hg commit -d '1901-12-13 20:45:52 +0000' -m commit-7-2
@@ -38,10 +38,10 @@
3 1901-12-13 20:45:52 +0000
2 1901-12-13 20:45:52 +0000
$ hg commit -d '1901-12-13 20:45:51 +0000' -m commit-7
- abort: date exceeds 32 bits: -2147483649
+ hg: parse error: date exceeds 32 bits: -2147483649
[255]
$ hg commit -d '-2147483649 0' -m commit-7
- abort: date exceeds 32 bits: -2147483649
+ hg: parse error: date exceeds 32 bits: -2147483649
[255]
commit added file that has been deleted
@@ -120,7 +120,7 @@
An empty date was interpreted as epoch origin
$ echo foo >> foo
- $ hg commit -d '' -m commit-no-date
+ $ hg commit -d '' -m commit-no-date --config devel.default-date=
$ hg tip --template '{date|isodate}\n' | grep '1970'
[1]
--- a/tests/test-completion.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-completion.t Tue Jun 20 16:33:46 2017 -0400
@@ -99,6 +99,7 @@
debugnamecomplete
debugobsolete
debugpathcomplete
+ debugpickmergetool
debugpushkey
debugpvec
debugrebuilddirstate
@@ -110,6 +111,7 @@
debugsub
debugsuccessorssets
debugtemplate
+ debugupdatecaches
debugupgraderepo
debugwalk
debugwireargs
@@ -215,7 +217,7 @@
Show all commands + options
$ hg debugcommands
add: include, exclude, subrepos, dry-run
- annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, ignore-all-space, ignore-space-change, ignore-blank-lines, include, exclude, template
+ annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, include, exclude, template
clone: noupdate, updaterev, rev, branch, pull, uncompressed, ssh, remotecmd, insecure
commit: addremove, close-branch, amend, secret, edit, interactive, include, exclude, message, logfile, date, user, subrepos
diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, root, include, exclude, subrepos
@@ -239,13 +241,13 @@
branch: force, clean
branches: active, closed, template
bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
- cat: output, rev, decode, include, exclude
+ cat: output, rev, decode, include, exclude, template
config: untrusted, edit, local, global, template
copy: after, force, include, exclude, dry-run
debugancestor:
debugapplystreamclonebundle:
debugbuilddag: mergeable-file, overwritten-file, new-file
- debugbundle: all, spec
+ debugbundle: all, part-type, spec
debugcheckstate:
debugcolor: style
debugcommands:
@@ -270,19 +272,21 @@
debuglocks: force-lock, force-wlock
debugmergestate:
debugnamecomplete:
- debugobsolete: flags, record-parents, rev, index, delete, date, user, template
+ debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
debugpathcomplete: full, normal, added, removed
+ debugpickmergetool: rev, changedelete, include, exclude, tool
debugpushkey:
debugpvec:
debugrebuilddirstate: rev, minimal
debugrebuildfncache:
debugrename: rev
debugrevlog: changelog, manifest, dir, dump
- debugrevspec: optimize, show-stage, no-optimized, verify-optimized
+ debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
debugsetparents:
debugsub: rev
debugsuccessorssets:
debugtemplate: rev, define
+ debugupdatecaches:
debugupgraderepo: optimize, run
debugwalk: include, exclude
debugwireargs: three, four, five, ssh, remotecmd, insecure
--- a/tests/test-conflict.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-conflict.t Tue Jun 20 16:33:46 2017 -0400
@@ -220,6 +220,7 @@
$ hg up -C
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "e0693e20f496: 123456789012345678901234567890123456789012345678901234567890????"
1 other heads for branch "default"
$ printf "\n\nEnd of file\n" >> a
$ hg ci -m "Add some stuff at the end"
@@ -258,6 +259,7 @@
$ hg up -C
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to "18b51d585961: Add some stuff at the beginning"
1 other heads for branch "default"
$ hg merge --tool :merge-local
merging a
--- a/tests/test-context.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-context.py Tue Jun 20 16:33:46 2017 -0400
@@ -1,9 +1,11 @@
from __future__ import absolute_import, print_function
import os
+from mercurial.node import hex
from mercurial import (
context,
encoding,
hg,
+ scmutil,
ui as uimod,
)
@@ -146,3 +148,34 @@
match=scmutil.matchfiles(repo, ['bar-r', 'foo']),
listclean=True))
print('wcctx._status=%s' % (str(wcctx._status)))
+
+os.chdir('..')
+
+# test manifestlog being changed
+print('== commit with manifestlog invalidated')
+
+repo = hg.repository(u, 'test2', create=1)
+os.chdir('test2')
+
+# make some commits
+for i in [b'1', b'2', b'3']:
+ with open(i, 'wb') as f:
+ f.write(i)
+ status = scmutil.status([], [i], [], [], [], [], [])
+ ctx = context.workingcommitctx(repo, status, text=i, user=b'test@test.com',
+ date=(0, 0))
+ ctx.p1().manifest() # side effect: cache manifestctx
+ n = repo.commitctx(ctx)
+ print('commit %s: %s' % (i, hex(n)))
+
+ # touch 00manifest.i mtime so storecache could expire.
+ # repo.__dict__['manifestlog'] is deleted by transaction releasefn.
+ st = repo.svfs.stat('00manifest.i')
+ repo.svfs.utime('00manifest.i', (st.st_mtime + 1, st.st_mtime + 1))
+
+ # read the file just committed
+ try:
+ if repo[n][i].data() != i:
+ print('data mismatch')
+ except Exception as ex:
+ print('cannot read data: %r' % ex)
--- a/tests/test-context.py.out Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-context.py.out Tue Jun 20 16:33:46 2017 -0400
@@ -44,3 +44,7 @@
wcctx._status=<status modified=['bar-m'], added=['bar-a'], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
<status modified=[], added=['bar-r'], removed=[], deleted=[], unknown=[], ignored=[], clean=['foo']>
wcctx._status=<status modified=['bar-m'], added=['bar-a'], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
+== commit with manifestlog invalidated
+commit 1: 2efe531a913fa648867ab8824360371679d05a65
+commit 2: 2caca91f6362020334384ebe27bae67315298abf
+commit 3: abd6b0f49f338be22b094ef2b7425e8048f8337b
--- a/tests/test-contrib-check-code.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-contrib-check-code.t Tue Jun 20 16:33:46 2017 -0400
@@ -152,6 +152,15 @@
> $ function onwarn {}
warning: don't use 'function', use old style
[1]
+ $ cat > error.t <<EOF
+ > $ [ foo == bar ]
+ > EOF
+ $ "$check_code" error.t
+ error.t:1:
+ > $ [ foo == bar ]
+ [ foo == bar ] is a bashism, use [ foo = bar ] instead
+ [1]
+ $ rm error.t
$ cat > raise-format.py <<EOF
> raise SomeException, message
> # this next line is okay
--- a/tests/test-contrib-perf.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-contrib-perf.t Tue Jun 20 16:33:46 2017 -0400
@@ -51,6 +51,8 @@
(no help text available)
perfannotate (no help text available)
perfbdiff benchmark a bdiff between revisions
+ perfbookmarks
+ benchmark parsing bookmarks from disk to memory
perfbranchmap
benchmark the update of a branchmap
perfcca (no help text available)
@@ -93,12 +95,16 @@
perfparents (no help text available)
perfpathcopies
(no help text available)
+ perfphases benchmark phasesets computation
perfrawfiles (no help text available)
- perfrevlog Benchmark reading a series of revisions from a revlog.
perfrevlogchunks
Benchmark operations on revlog chunks.
+ perfrevlogindex
+ Benchmark operations against a revlog index.
perfrevlogrevision
Benchmark obtaining a revlog revision.
+ perfrevlogrevisions
+ Benchmark reading a series of revisions from a revlog.
perfrevrange (no help text available)
perfrevset benchmark the execution time of a revset
perfstartup (no help text available)
@@ -118,6 +124,7 @@
$ hg perfannotate a
$ hg perfbdiff -c 1
$ hg perfbdiff --alldata 1
+ $ hg perfbookmarks
$ hg perfbranchmap
$ hg perfcca
$ hg perfchangegroupchangelog
@@ -145,7 +152,8 @@
$ hg perfnodelookup 2
$ hg perfpathcopies 1 2
$ hg perfrawfiles 2
- $ hg perfrevlog .hg/store/data/a.i
+ $ hg perfrevlogindex -c
+ $ hg perfrevlogrevisions .hg/store/data/a.i
$ hg perfrevlogrevision -m 0
$ hg perfrevlogchunks -c
$ hg perfrevrange
@@ -165,7 +173,3 @@
$ (hg files -r 1.2 glob:mercurial/*.c glob:mercurial/*.py;
> hg files -r tip glob:mercurial/*.c glob:mercurial/*.py) |
> "$TESTDIR"/check-perf-code.py contrib/perf.py
- contrib/perf.py:869:
- > r.revision(r.node(x))
- don't convert rev to node before passing to revision(nodeorrev)
- [1]
--- a/tests/test-convert-bzr-ghosts.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-convert-bzr-ghosts.t Tue Jun 20 16:33:46 2017 -0400
@@ -21,7 +21,7 @@
$ bzr add -q somefile
$ bzr commit -q -m 'Initial layout setup'
$ echo morecontent >> somefile
- $ python ../../ghostcreator.py 'Commit with ghost revision' ghostrev
+ $ $PYTHON ../../ghostcreator.py 'Commit with ghost revision' ghostrev
$ cd ..
$ hg convert source source-hg
initializing destination source-hg repository
--- a/tests/test-convert-bzr-treeroot.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-convert-bzr-treeroot.t Tue Jun 20 16:33:46 2017 -0400
@@ -20,7 +20,7 @@
$ echo content > file
$ bzr add -q file
$ bzr commit -q -m 'Initial add'
- $ python ../../treeset.py 'Changed root' new
+ $ $PYTHON ../../treeset.py 'Changed root' new
$ cd ..
$ hg convert source source-hg
initializing destination source-hg repository
--- a/tests/test-convert-bzr.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-convert-bzr.t Tue Jun 20 16:33:46 2017 -0400
@@ -129,10 +129,10 @@
$ bzr branch -q source source-improve
$ cd source
$ echo more >> a
- $ python ../helper.py 'Editing a' 100
+ $ $PYTHON ../helper.py 'Editing a' 100
$ cd ../source-improve
$ echo content3 >> b
- $ python ../helper.py 'Editing b' 200
+ $ $PYTHON ../helper.py 'Editing b' 200
$ cd ../source
$ bzr merge -q ../source-improve
$ bzr commit -q -m 'Merged improve branch'
--- a/tests/test-convert-clonebranches.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-convert-clonebranches.t Tue Jun 20 16:33:46 2017 -0400
@@ -40,7 +40,7 @@
convert
$ hg convert -v --config convert.hg.clonebranches=1 source dest |
- > python filter.py
+ > $PYTHON filter.py
3 adda
2 changea
1 addb
@@ -73,7 +73,7 @@
incremental conversion
$ hg convert -v --config convert.hg.clonebranches=1 source dest |
- > python filter.py
+ > $PYTHON filter.py
2 c1
pulling from branch0 into branch1
4 changesets found
--- a/tests/test-convert-git.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-convert-git.t Tue Jun 20 16:33:46 2017 -0400
@@ -948,7 +948,7 @@
$ hg convert git-repo4 git-repo4-broken-hg 2>&1 | grep 'abort:'
abort: cannot read changes in 1c0ce3c5886f83a1d78a7b517cdff5cf9ca17bdd
-#if no-windows
+#if no-windows git19
test for escaping the repo name (CVE-2016-3069)
--- a/tests/test-convert-hg-source.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-convert-hg-source.t Tue Jun 20 16:33:46 2017 -0400
@@ -130,7 +130,7 @@
> for i, l in enumerate(file(sys.argv[1]))]
> file(sys.argv[1], 'wb').write(''.join(lines))
> EOF
- $ python rewrite.py new/.hg/shamap
+ $ $PYTHON rewrite.py new/.hg/shamap
$ cd orig
$ hg up -qC 1
$ echo foo >> foo
--- a/tests/test-convert-svn-encoding.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-convert-svn-encoding.t Tue Jun 20 16:33:46 2017 -0400
@@ -53,6 +53,7 @@
source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@1
converting: 0/6 revisions (0.00%)
committing changelog
+ updating the branch cache
4 hello
source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@2
converting: 1/6 revisions (16.67%)
@@ -67,6 +68,7 @@
getting files: \xc3\xa9 2/2 files (100.00%) (esc)
committing manifest
committing changelog
+ updating the branch cache
3 copy files
source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@3
converting: 2/6 revisions (33.33%)
@@ -93,6 +95,7 @@
\xc3\xb9/e\xcc\x81: copy \xc3\xa0/e\xcc\x81:a9092a3d84a37b9993b5c73576f6de29b7ea50f6 (esc)
committing manifest
committing changelog
+ updating the branch cache
2 remove files
source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@4
converting: 3/6 revisions (50.00%)
@@ -109,18 +112,21 @@
committing files:
committing manifest
committing changelog
+ updating the branch cache
1 branch to branch?
source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/branches/branch?@5
converting: 4/6 revisions (66.67%)
reparent to file://*/svn-repo/branches/branch%C3%A9 (glob)
scanning paths: /branches/branch\xc3\xa9 0/1 paths (0.00%) (esc)
committing changelog
+ updating the branch cache
0 branch to branch?e
source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/branches/branch?e@6
converting: 5/6 revisions (83.33%)
reparent to file://*/svn-repo/branches/branch%C3%A9e (glob)
scanning paths: /branches/branch\xc3\xa9e 0/1 paths (0.00%) (esc)
committing changelog
+ updating the branch cache
reparent to file://*/svn-repo (glob)
reparent to file://*/svn-repo/branches/branch%C3%A9e (glob)
reparent to file://*/svn-repo (glob)
@@ -130,6 +136,7 @@
.hgtags
committing manifest
committing changelog
+ updating the branch cache
run hg sink post-conversion action
$ cd A-hg
$ hg up
--- a/tests/test-convert-svn-sink.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-convert-svn-sink.t Tue Jun 20 16:33:46 2017 -0400
@@ -10,7 +10,7 @@
> if [ $2 -gt 0 ]; then
> limit="--limit=$2"
> fi
- > svn log --xml -v $limit | python "$TESTDIR/svnxml.py"
+ > svn log --xml -v $limit | $PYTHON "$TESTDIR/svnxml.py"
> )
> }
--- a/tests/test-convert.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-convert.t Tue Jun 20 16:33:46 2017 -0400
@@ -125,9 +125,9 @@
where "original_branch_name" is the name of the branch in the source
repository, and "new_branch_name" is the name of the branch is the
- destination repository. No whitespace is allowed in the branch names. This
- can be used to (for instance) move code in one repository from "default"
- to a named branch.
+ destination repository. No whitespace is allowed in the new branch name.
+ This can be used to (for instance) move code in one repository from
+ "default" to a named branch.
Mercurial Source
################
@@ -581,3 +581,30 @@
branch=default
convert_revision=a3bc6100aa8ec03e00aaf271f1f50046fb432072
convert_source=mysource
+
+ $ cat > branchmap.txt << EOF
+ > old branch new_branch
+ > EOF
+
+ $ hg -R a branch -q 'old branch'
+ $ echo gg > a/g
+ $ hg -R a ci -m 'branch name with spaces'
+ $ hg convert --branchmap branchmap.txt a d
+ initializing destination d repository
+ scanning source...
+ sorting...
+ converting...
+ 6 a
+ 5 b
+ 4 c
+ 3 d
+ 2 e
+ 1 g
+ 0 branch name with spaces
+
+ $ hg -R a branches
+ old branch 6:a24a66ade009
+ default 5:a3bc6100aa8e (inactive)
+ $ hg -R d branches
+ new_branch 6:64ed208b732b
+ default 5:a3bc6100aa8e (inactive)
--- a/tests/test-copy.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-copy.t Tue Jun 20 16:33:46 2017 -0400
@@ -32,6 +32,7 @@
b: copy a:b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
committing manifest
committing changelog
+ updating the branch cache
committed changeset 1:93580a2c28a50a56f63526fb305067e6fbf739c4
we should see two history entries
--- a/tests/test-ctxmanager.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-ctxmanager.py Tue Jun 20 16:33:46 2017 -0400
@@ -55,23 +55,21 @@
def test_raise_on_enter(self):
trace = []
addtrace = trace.append
- def go():
+ with self.assertRaises(ctxerror):
with util.ctxmanager(ctxmgr('a', addtrace),
lambda: raise_on_enter('b', addtrace)) as c:
c.enter()
addtrace('unreachable')
- self.assertRaises(ctxerror, go)
self.assertEqual(trace, [('enter', 'a'), ('raise', 'b'), ('exit', 'a')])
def test_raise_on_exit(self):
trace = []
addtrace = trace.append
- def go():
+ with self.assertRaises(ctxerror):
with util.ctxmanager(ctxmgr('a', addtrace),
lambda: raise_on_exit('b', addtrace)) as c:
c.enter()
addtrace('running')
- self.assertRaises(ctxerror, go)
self.assertEqual(trace, [('enter', 'a'), ('enter', 'b'), 'running',
('raise', 'b'), ('exit', 'a')])
--- a/tests/test-debugcommands.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-debugcommands.t Tue Jun 20 16:33:46 2017 -0400
@@ -109,6 +109,23 @@
6 5 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 1 (glob)
7 6 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
8 7 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
+
+Test WdirUnsupported exception
+
+ $ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff
+ abort: working directory revision cannot be specified
+ [255]
+
+Test cache warming command
+
+ $ rm -rf .hg/cache/
+ $ hg debugupdatecaches --debug
+ updating the branch cache
+ $ ls -r .hg/cache/*
+ .hg/cache/rbc-revs-v1
+ .hg/cache/rbc-names-v1
+ .hg/cache/branch2-served
+
$ cd ..
Test internal debugstacktrace command
@@ -125,7 +142,7 @@
> dst('hi ...\\nfrom h hidden in g', 1, depth=2)
> f()
> EOF
- $ python debugstacktrace.py
+ $ $PYTHON debugstacktrace.py
stacktrace at:
debugstacktrace.py:10 in * (glob)
debugstacktrace.py:3 in f
--- a/tests/test-demandimport.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-demandimport.py Tue Jun 20 16:33:46 2017 -0400
@@ -52,6 +52,9 @@
import re as fred
print("fred =", f(fred))
+import re as remod
+print("remod =", f(remod))
+
import sys as re
print("re =", f(re))
@@ -59,10 +62,24 @@
print("fred.sub =", f(fred.sub))
print("fred =", f(fred))
+remod.escape # use remod
+print("remod =", f(remod))
+
print("re =", f(re))
print("re.stderr =", f(re.stderr))
print("re =", f(re))
+# Test access to special attributes through demandmod proxy
+from mercurial import pvec as pvecproxy
+print("pvecproxy =", f(pvecproxy))
+print("pvecproxy.__doc__ = %r"
+ % (' '.join(pvecproxy.__doc__.split()[:3]) + ' ...'))
+print("pvecproxy.__name__ = %r" % pvecproxy.__name__)
+# __name__ must be accessible via __dict__ so the relative imports can be
+# resolved
+print("pvecproxy.__dict__['__name__'] = %r" % pvecproxy.__dict__['__name__'])
+print("pvecproxy =", f(pvecproxy))
+
import contextlib
print("contextlib =", f(contextlib))
try:
--- a/tests/test-demandimport.py.out Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-demandimport.py.out Tue Jun 20 16:33:46 2017 -0400
@@ -9,13 +9,20 @@
hgweb_mod = <unloaded module 'hgweb_mod'>
hgweb = <module 'mercurial.hgweb' from '?'>
fred = <unloaded module 're'>
+remod = <unloaded module 're'>
re = <unloaded module 'sys'>
fred = <unloaded module 're'>
fred.sub = <function sub at 0x?>
fred = <proxied module 're'>
+remod = <module 're' from '?'>
re = <unloaded module 'sys'>
re.stderr = <open file '<whatever>', mode 'w' at 0x?>
re = <proxied module 'sys'>
+pvecproxy = <unloaded module 'pvec'>
+pvecproxy.__doc__ = 'A "pvec" is ...'
+pvecproxy.__name__ = 'mercurial.pvec'
+pvecproxy.__dict__['__name__'] = 'mercurial.pvec'
+pvecproxy = <proxied module 'pvec'>
contextlib = <unloaded module 'contextlib'>
contextlib.unknownattr = ImportError: cannot import name unknownattr
__import__('contextlib', ..., ['unknownattr']) = <module 'contextlib' from '?'>
--- a/tests/test-devel-warnings.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-devel-warnings.t Tue Jun 20 16:33:46 2017 -0400
@@ -3,10 +3,10 @@
> """A small extension that tests our developer warnings
> """
>
- > from mercurial import cmdutil, repair, util
+ > from mercurial import error, registrar, repair, util
>
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
>
> @command('buggylocking', [], '')
> def buggylocking(ui, repo):
@@ -61,6 +61,9 @@
> @command('nouiwarning', [], '')
> def nouiwarning(ui, repo):
> util.nouideprecwarn('this is a test', '13.37')
+ > @command('programmingerror', [], '')
+ > def programmingerror(ui, repo):
+ > raise error.ProgrammingError('something went wrong', hint='try again')
> EOF
$ cat << EOF >> $HGRCPATH
@@ -105,7 +108,6 @@
$ hg add a
$ hg commit -m a
$ hg stripintr 2>&1 | egrep -v '^(\*\*| )'
- saved backup bundle to $TESTTMP/lock-checker/.hg/strip-backup/*-backup.hg (glob)
Traceback (most recent call last):
mercurial.error.ProgrammingError: cannot strip from inside a transaction
@@ -163,9 +165,23 @@
** Python * (glob)
** Mercurial Distributed SCM (*) (glob)
** Extensions loaded: * (glob)
+ ** ProgrammingError: transaction requires locking
Traceback (most recent call last):
mercurial.error.ProgrammingError: transaction requires locking
+ $ hg programmingerror 2>&1 | egrep -v '^ '
+ ** Unknown exception encountered with possibly-broken third-party extension buggylocking
+ ** which supports versions unknown of Mercurial.
+ ** Please disable buggylocking and try your action again.
+ ** If that fixes the bug please report it to the extension author.
+ ** Python * (glob)
+ ** Mercurial Distributed SCM (*) (glob)
+ ** Extensions loaded: * (glob)
+ ** ProgrammingError: something went wrong
+ ** (try again)
+ Traceback (most recent call last):
+ mercurial.error.ProgrammingError: something went wrong
+
Old style deprecation warning
$ hg nouiwarning
--- a/tests/test-diff-binary-file.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-diff-binary-file.t Tue Jun 20 16:33:46 2017 -0400
@@ -83,7 +83,7 @@
> path = sys.argv[1]
> open(path, 'wb').write('\x00\x01\x02\x03')
> EOF
- $ python writebin.py binfile.bin
+ $ $PYTHON writebin.py binfile.bin
$ hg add binfile.bin
$ hg ci -m 'add binfile.bin'
--- a/tests/test-diffstat.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-diffstat.t Tue Jun 20 16:33:46 2017 -0400
@@ -105,3 +105,83 @@
$ hg diff --stat --root . -I old
$ cd ..
+
+Files with lines beginning with '--' or '++' should be properly counted in diffstat
+
+ $ hg up -Cr tip
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ rm dir1/new
+ $ rm dir2/new
+ $ rm "file with spaces"
+ $ cat > file << EOF
+ > line 1
+ > line 2
+ > line 3
+ > EOF
+ $ hg commit -Am file
+ adding file
+
+Lines added starting with '--' should count as additions
+ $ cat > file << EOF
+ > line 1
+ > -- line 2, with dashes
+ > line 3
+ > EOF
+
+ $ hg diff --root .
+ diff -r be1569354b24 file
+ --- a/file Thu Jan 01 00:00:00 1970 +0000
+ +++ b/file * (glob)
+ @@ -1,3 +1,3 @@
+ line 1
+ -line 2
+ +-- line 2, with dashes
+ line 3
+
+ $ hg diff --root . --stat
+ file | 2 +-
+ 1 files changed, 1 insertions(+), 1 deletions(-)
+
+Lines changed starting with '--' should count as deletions
+ $ hg commit -m filev2
+ $ cat > file << EOF
+ > line 1
+ > -- line 2, with dashes, changed again
+ > line 3
+ > EOF
+
+ $ hg diff --root .
+ diff -r 160f7c034df6 file
+ --- a/file Thu Jan 01 00:00:00 1970 +0000
+ +++ b/file * (glob)
+ @@ -1,3 +1,3 @@
+ line 1
+ --- line 2, with dashes
+ +-- line 2, with dashes, changed again
+ line 3
+
+ $ hg diff --root . --stat
+ file | 2 +-
+ 1 files changed, 1 insertions(+), 1 deletions(-)
+
+Lines changed starting with '--' should count as deletions
+and starting with '++' should count as additions
+ $ cat > file << EOF
+ > line 1
+ > ++ line 2, switched dashes to plusses
+ > line 3
+ > EOF
+
+ $ hg diff --root .
+ diff -r 160f7c034df6 file
+ --- a/file Thu Jan 01 00:00:00 1970 +0000
+ +++ b/file * (glob)
+ @@ -1,3 +1,3 @@
+ line 1
+ --- line 2, with dashes
+ +++ line 2, switched dashes to plusses
+ line 3
+
+ $ hg diff --root . --stat
+ file | 2 +-
+ 1 files changed, 1 insertions(+), 1 deletions(-)
--- a/tests/test-dirstate-race.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-dirstate-race.t Tue Jun 20 16:33:46 2017 -0400
@@ -45,7 +45,7 @@
#endif
$ hg add b dir1 d e
- adding dir1/c
+ adding dir1/c (glob)
$ hg commit -m test2
$ cat >> $TESTTMP/dirstaterace.py << EOF
@@ -57,7 +57,8 @@
> extensions.wrapfunction(context.workingctx, '_checklookup', overridechecklookup)
> def overridechecklookup(orig, self, files):
> # make an update that changes the dirstate from underneath
- > self._repo.ui.system(self._repo.ui.config('dirstaterace', 'command'), cwd=self._repo.root)
+ > self._repo.ui.system(r"sh '$TESTTMP/dirstaterace.sh'",
+ > cwd=self._repo.root)
> return orig(self, files)
> EOF
@@ -73,8 +74,11 @@
definitely a bug, but the fix for that is hard and the next status run is fine
anyway.
- $ hg status --config extensions.dirstaterace=$TESTTMP/dirstaterace.py \
- > --config dirstaterace.command='rm b && rm -r dir1 && rm d && mkdir d && rm e && mkdir e'
+ $ cat > $TESTTMP/dirstaterace.sh <<EOF
+ > rm b && rm -r dir1 && rm d && mkdir d && rm e && mkdir e
+ > EOF
+
+ $ hg status --config extensions.dirstaterace=$TESTTMP/dirstaterace.py
M d
M e
! b
@@ -91,3 +95,140 @@
! d
! dir1/c
! e
+
+ $ rmdir d e
+ $ hg update -C -q .
+
+Test that dirstate changes aren't written out at the end of "hg
+status", if .hg/dirstate is already changed simultaneously before
+acquisition of wlock in workingctx._poststatusfixup().
+
+This avoidance is important to keep consistency of dirstate in race
+condition (see issue5584 for detail).
+
+ $ hg parents -q
+ 1:* (glob)
+
+ $ hg debugrebuilddirstate
+ $ hg debugdirstate
+ n 0 -1 unset a
+ n 0 -1 unset b
+ n 0 -1 unset d
+ n 0 -1 unset dir1/c
+ n 0 -1 unset e
+
+ $ cat > $TESTTMP/dirstaterace.sh <<EOF
+ > # This script assumes timetable of typical issue5584 case below:
+ > #
+ > # 1. "hg status" loads .hg/dirstate
+ > # 2. "hg status" confirms clean-ness of FILE
+ > # 3. "hg update -C 0" updates the working directory simultaneously
+ > # (FILE is removed, and FILE is dropped from .hg/dirstate)
+ > # 4. "hg status" acquires wlock
+ > # (.hg/dirstate is re-loaded = no FILE entry in dirstate)
+ > # 5. "hg status" marks FILE in dirstate as clean
+ > # (FILE entry is added to in-memory dirstate)
+ > # 6. "hg status" writes dirstate changes into .hg/dirstate
+ > # (FILE entry is written into .hg/dirstate)
+ > #
+ > # To reproduce similar situation easily and certainly, #2 and #3
+ > # are swapped. "hg cat" below ensures #2 on "hg status" side.
+ >
+ > hg update -q -C 0
+ > hg cat -r 1 b > b
+ > EOF
+
+"hg status" below should excludes "e", of which exec flag is set, for
+portability of test scenario, because unsure but missing "e" is
+treated differently in _checklookup() according to runtime platform.
+
+- "missing(!)" on POSIX, "pctx[f].cmp(self[f])" raises ENOENT
+- "modified(M)" on Windows, "self.flags(f) != pctx.flags(f)" is True
+
+ $ hg status --config extensions.dirstaterace=$TESTTMP/dirstaterace.py --debug -X path:e
+ skip updating dirstate: identity mismatch
+ M a
+ ! d
+ ! dir1/c
+
+ $ hg parents -q
+ 0:* (glob)
+ $ hg files
+ a
+ $ hg debugdirstate
+ n * * * a (glob)
+
+ $ rm b
+
+#if fsmonitor
+
+Create fsmonitor state.
+
+ $ hg status
+ $ f --type .hg/fsmonitor.state
+ .hg/fsmonitor.state: file
+
+Test that invalidating fsmonitor state in the middle (which doesn't require the
+wlock) causes the fsmonitor update to be skipped.
+hg debugrebuilddirstate ensures that the dirstaterace hook will be called, but
+it also invalidates the fsmonitor state. So back it up and restore it.
+
+ $ mv .hg/fsmonitor.state .hg/fsmonitor.state.tmp
+ $ hg debugrebuilddirstate
+ $ mv .hg/fsmonitor.state.tmp .hg/fsmonitor.state
+
+ $ cat > $TESTTMP/dirstaterace.sh <<EOF
+ > rm .hg/fsmonitor.state
+ > EOF
+
+ $ hg status --config extensions.dirstaterace=$TESTTMP/dirstaterace.py --debug
+ skip updating fsmonitor.state: identity mismatch
+ $ f .hg/fsmonitor.state
+ .hg/fsmonitor.state: file not found
+
+#endif
+
+Set up a rebase situation for issue5581.
+
+ $ echo c2 > a
+ $ echo c2 > b
+ $ hg add b
+ $ hg commit -m c2
+ created new head
+ $ echo c3 >> a
+ $ hg commit -m c3
+ $ hg update 2
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo c4 >> a
+ $ echo c4 >> b
+ $ hg commit -m c4
+ created new head
+
+Configure a merge tool that runs status in the middle of the rebase.
+
+ $ cat >> $TESTTMP/mergetool-race.sh << EOF
+ > echo "custom merge tool"
+ > printf "c2\nc3\nc4\n" > \$1
+ > hg --cwd "$TESTTMP/repo" status
+ > echo "custom merge tool end"
+ > EOF
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > rebase =
+ > [merge-tools]
+ > test.executable=sh
+ > test.args=$TESTTMP/mergetool-race.sh \$output
+ > EOF
+
+ $ hg rebase -s . -d 3 --tool test
+ rebasing 4:b08445fd6b2a "c4" (tip)
+ merging a
+ custom merge tool
+ M a
+ ? a.orig
+ custom merge tool end
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/* (glob)
+
+This hg status should be empty, whether or not fsmonitor is enabled (issue5581).
+
+ $ hg status
--- a/tests/test-doctest.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-doctest.py Tue Jun 20 16:33:46 2017 -0400
@@ -25,6 +25,7 @@
testmod('mercurial.changelog')
testmod('mercurial.color')
testmod('mercurial.config')
+testmod('mercurial.context')
testmod('mercurial.dagparser', optionflags=doctest.NORMALIZE_WHITESPACE)
testmod('mercurial.dispatch')
testmod('mercurial.encoding')
--- a/tests/test-duplicateoptions.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-duplicateoptions.py Tue Jun 20 16:33:46 2017 -0400
@@ -6,7 +6,7 @@
ui as uimod,
)
-ignore = set(['highlight', 'win32text', 'factotum'])
+ignore = {'highlight', 'win32text', 'factotum'}
if os.name != 'nt':
ignore.add('win32mbcs')
--- a/tests/test-encoding-align.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-encoding-align.t Tue Jun 20 16:33:46 2017 -0400
@@ -16,9 +16,9 @@
> f = file('l', 'w'); f.write(l); f.close()
> # instant extension to show list of options
> f = file('showoptlist.py', 'w'); f.write("""# encoding: utf-8
- > from mercurial import cmdutil
+ > from mercurial import registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
>
> @command('showoptlist',
> [('s', 'opt1', '', 'short width' + ' %(s)s' * 8, '%(s)s'),
--- a/tests/test-encoding-textwrap.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-encoding-textwrap.t Tue Jun 20 16:33:46 2017 -0400
@@ -6,10 +6,10 @@
define commands to display help text
$ cat << EOF > show.py
- > from mercurial import cmdutil
+ > from mercurial import registrar
>
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
>
> # Japanese full-width characters:
> @command('show_full_ja', [], '')
--- a/tests/test-eol.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-eol.t Tue Jun 20 16:33:46 2017 -0400
@@ -69,7 +69,7 @@
> echo '% a.txt'
> cat a.txt
> hg diff
- > python ../switch-eol.py $1 a.txt
+ > $PYTHON ../switch-eol.py $1 a.txt
> echo '% hg diff only reports a single changed line:'
> hg diff
> echo "% reverting back to $1 format"
--- a/tests/test-eolfilename.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-eolfilename.t Tue Jun 20 16:33:46 2017 -0400
@@ -33,6 +33,7 @@
[255]
$ echo foo > "$A"
$ hg debugwalk
+ matcher: <alwaysmatcher>
f he\r (no-eol) (esc)
llo he\r (no-eol) (esc)
llo
--- a/tests/test-export.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-export.t Tue Jun 20 16:33:46 2017 -0400
@@ -137,6 +137,28 @@
foo-9
+foo-10
+Exporting wdir revision:
+
+ $ echo "foo-wdir" >> foo
+ $ hg export 'wdir()'
+ # HG changeset patch
+ # User test
+ # Date 0 0
+ # Thu Jan 01 00:00:00 1970 +0000
+ # Node ID ffffffffffffffffffffffffffffffffffffffff
+ # Parent f3acbafac161ec68f1598af38f794f28847ca5d3
+
+
+ diff -r f3acbafac161 foo
+ --- a/foo Thu Jan 01 00:00:00 1970 +0000
+ +++ b/foo Thu Jan 01 00:00:00 1970 +0000
+ @@ -10,3 +10,4 @@
+ foo-9
+ foo-10
+ foo-11
+ +foo-wdir
+ $ hg revert -q foo
+
No filename should be printed if stdout is specified explicitly:
$ hg export -v 1 -o -
--- a/tests/test-extdiff.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-extdiff.t Tue Jun 20 16:33:46 2017 -0400
@@ -62,15 +62,10 @@
Should diff cloned files directly:
-#if windows
$ hg falabala -r 0:1
- diffing "*\\extdiff.*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob)
+ diffing "*\\extdiff.*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) (windows !)
+ diffing */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) (no-windows !)
[1]
-#else
- $ hg falabala -r 0:1
- diffing */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob)
- [1]
-#endif
Specifying an empty revision should abort.
@@ -92,41 +87,27 @@
Should diff cloned file against wc file:
-#if windows
$ hg falabala
- diffing "*\\extdiff.*\\a.2a13a4d2da36\\a" "*\\a\\a" (glob)
+ diffing "*\\extdiff.*\\a.2a13a4d2da36\\a" "*\\a\\a" (glob) (windows !)
+ diffing */extdiff.*/a.2a13a4d2da36/a */a/a (glob) (no-windows !)
[1]
-#else
- $ hg falabala
- diffing */extdiff.*/a.2a13a4d2da36/a */a/a (glob)
- [1]
-#endif
Test --change option:
$ hg ci -d '2 0' -mtest3
-#if windows
+
$ hg falabala -c 1
- diffing "*\\extdiff.*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob)
+ diffing "*\\extdiff.*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) (windows !)
+ diffing */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) (no-windows !)
[1]
-#else
- $ hg falabala -c 1
- diffing */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob)
- [1]
-#endif
Check diff are made from the first parent:
-#if windows
$ hg falabala -c 3 || echo "diff-like tools yield a non-zero exit code"
- diffing "*\\extdiff.*\\a.2a13a4d2da36\\a" "a.46c0e4daeb72\\a" (glob)
+ diffing "*\\extdiff.*\\a.2a13a4d2da36\\a" "a.46c0e4daeb72\\a" (glob) (windows !)
+ diffing */extdiff.*/a.2a13a4d2da36/a a.46c0e4daeb72/a (glob) (no-windows !)
diff-like tools yield a non-zero exit code
-#else
- $ hg falabala -c 3 || echo "diff-like tools yield a non-zero exit code"
- diffing */extdiff.*/a.2a13a4d2da36/a a.46c0e4daeb72/a (glob)
- diff-like tools yield a non-zero exit code
-#endif
issue3153: ensure using extdiff with removed subrepos doesn't crash:
@@ -158,21 +139,16 @@
> EOF
$ hg update -q -C 0
$ echo a >> a
-#if windows
+
$ hg --debug 4463a | grep '^running'
- running 'echo a-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob)
+ running 'echo a-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob) (windows !)
+ running 'echo a-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob) (no-windows !)
$ hg --debug 4463b | grep '^running'
- running 'echo b-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob)
+ running 'echo b-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob) (windows !)
+ running 'echo b-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob) (no-windows !)
$ hg --debug echo | grep '^running'
- running '*echo* "*\\a" "*\\a"' in */extdiff.* (glob)
-#else
- $ hg --debug 4463a | grep '^running'
- running 'echo a-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob)
- $ hg --debug 4463b | grep '^running'
- running 'echo b-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob)
- $ hg --debug echo | grep '^running'
- running '*echo */a $TESTTMP/a/a' in */extdiff.* (glob)
-#endif
+ running '*echo* "*\\a" "*\\a"' in */extdiff.* (glob) (windows !)
+ running '*echo */a $TESTTMP/a/a' in */extdiff.* (glob) (no-windows !)
(getting options from other than extdiff section)
@@ -189,29 +165,22 @@
> [merge-tools]
> 4463b3.diffargs = b3-naked 'single quoted' "double quoted"
> EOF
-#if windows
+
$ hg --debug 4463b2 | grep '^running'
- running 'echo b2-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob)
+ running 'echo b2-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob) (windows !)
+ running 'echo b2-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob) (no-windows !)
$ hg --debug 4463b3 | grep '^running'
- running 'echo b3-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob)
+ running 'echo b3-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob) (windows !)
+ running 'echo b3-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob) (no-windows !)
$ hg --debug 4463b4 | grep '^running'
- running 'echo "*\\a" "*\\a"' in */extdiff.* (glob)
+ running 'echo "*\\a" "*\\a"' in */extdiff.* (glob) (windows !)
+ running 'echo */a $TESTTMP/a/a' in */extdiff.* (glob) (no-windows !)
$ hg --debug 4463b4 --option b4-naked --option 'being quoted' | grep '^running'
- running 'echo b4-naked "being quoted" "*\\a" "*\\a"' in */extdiff.* (glob)
+ running 'echo b4-naked "being quoted" "*\\a" "*\\a"' in */extdiff.* (glob) (windows !)
+ running "echo b4-naked 'being quoted' */a $TESTTMP/a/a" in */extdiff.* (glob) (no-windows !)
$ hg --debug extdiff -p echo --option echo-naked --option 'being quoted' | grep '^running'
- running 'echo echo-naked "being quoted" "*\\a" "*\\a"' in */extdiff.* (glob)
-#else
- $ hg --debug 4463b2 | grep '^running'
- running 'echo b2-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob)
- $ hg --debug 4463b3 | grep '^running'
- running 'echo b3-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob)
- $ hg --debug 4463b4 | grep '^running'
- running 'echo */a $TESTTMP/a/a' in */extdiff.* (glob)
- $ hg --debug 4463b4 --option b4-naked --option 'being quoted' | grep '^running'
- running "echo b4-naked 'being quoted' */a $TESTTMP/a/a" in */extdiff.* (glob)
- $ hg --debug extdiff -p echo --option echo-naked --option 'being quoted' | grep '^running'
- running "echo echo-naked 'being quoted' */a $TESTTMP/a/a" in */extdiff.* (glob)
-#endif
+ running 'echo echo-naked "being quoted" "*\\a" "*\\a"' in */extdiff.* (glob) (windows !)
+ running "echo echo-naked 'being quoted' */a $TESTTMP/a/a" in */extdiff.* (glob) (no-windows !)
$ touch 'sp ace'
$ hg add 'sp ace'
@@ -228,12 +197,10 @@
> odd.diffargs = --foo='\$clabel' '\$clabel' "--bar=\$clabel" "\$clabel"
> odd.executable = echo
> EOF
-#if windows
-TODO
-#else
+
$ hg --debug odd | grep '^running'
- running "*/echo --foo='sp ace' 'sp ace' --bar='sp ace' 'sp ace'" in * (glob)
-#endif
+ running '"*\\echo.exe" --foo="sp ace" "sp ace" --bar="sp ace" "sp ace"' in * (glob) (windows !)
+ running "*/echo --foo='sp ace' 'sp ace' --bar='sp ace' 'sp ace'" in * (glob) (no-windows !)
Empty argument must be quoted
@@ -243,22 +210,20 @@
> [merge-tools]
> kdiff3.diffargs=--L1 \$plabel1 --L2 \$clabel \$parent \$child
> EOF
-#if windows
- $ hg --debug kdiff3 -r0 | grep '^running'
- running 'echo --L1 "@0" --L2 "" a.8a5febb7f867 a' in * (glob)
-#else
+
$ hg --debug kdiff3 -r0 | grep '^running'
- running "echo --L1 '@0' --L2 '' a.8a5febb7f867 a" in * (glob)
-#endif
+ running 'echo --L1 "@0" --L2 "" a.8a5febb7f867 a' in * (glob) (windows !)
+ running "echo --L1 '@0' --L2 '' a.8a5febb7f867 a" in * (glob) (no-windows !)
-#if execbit
Test extdiff of multiple files in tmp dir:
$ hg update -C 0 > /dev/null
$ echo changed > a
$ echo changed > b
+#if execbit
$ chmod +x b
+#endif
Diff in working directory, before:
@@ -270,8 +235,8 @@
-a
+changed
diff --git a/b b/b
- old mode 100644
- new mode 100755
+ old mode 100644 (execbit !)
+ new mode 100755 (execbit !)
--- a/b
+++ b/b
@@ -1,1 +1,1 @@
@@ -284,14 +249,16 @@
Prepare custom diff/edit tool:
$ cat > 'diff tool.py' << EOT
- > #!/usr/bin/env python
+ > #!$PYTHON
> import time
> time.sleep(1) # avoid unchanged-timestamp problems
> file('a/a', 'ab').write('edited\n')
> file('a/b', 'ab').write('edited\n')
> EOT
+#if execbit
$ chmod +x 'diff tool.py'
+#endif
will change to /tmp/extdiff.TMP and populate directories a.TMP and a
and start tool
@@ -310,8 +277,8 @@
+changed
+edited
diff --git a/b b/b
- old mode 100644
- new mode 100755
+ old mode 100644 (execbit !)
+ new mode 100755 (execbit !)
--- a/b
+++ b/b
@@ -1,1 +1,2 @@
@@ -322,41 +289,93 @@
Test extdiff with --option:
$ hg extdiff -p echo -o this -c 1
- this */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob)
+ this "*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) (windows !)
+ this */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) (no-windows !)
[1]
$ hg falabala -o this -c 1
- diffing this */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob)
+ diffing this "*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) (windows !)
+ diffing this */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) (no-windows !)
[1]
Test extdiff's handling of options with spaces in them:
$ hg edspace -c 1
- name <user@example.com> */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob)
+ "name <user@example.com>" "*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) (windows !)
+ name <user@example.com> */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) (no-windows !)
[1]
$ hg extdiff -p echo -o "name <user@example.com>" -c 1
- name <user@example.com> */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob)
+ "name <user@example.com>" "*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) (windows !)
+ name <user@example.com> */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) (no-windows !)
[1]
Test with revsets:
$ hg extdif -p echo -c "rev(1)"
- */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob)
+ "*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) (windows !)
+ */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) (no-windows !)
[1]
$ hg extdif -p echo -r "0::1"
- */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob)
+ "*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) (windows !)
+ */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) (no-windows !)
[1]
Fallback to merge-tools.tool.executable|regkey
$ mkdir dir
- $ cat > 'dir/tool.sh' << EOF
+ $ cat > 'dir/tool.sh' << 'EOF'
> #!/bin/sh
+ > # Mimic a tool that syncs all attrs, including mtime
+ > cp $1/a $2/a
+ > touch -r $1/a $2/a
+ > chmod +x $2/a
> echo "** custom diff **"
> EOF
+#if execbit
$ chmod +x dir/tool.sh
+#endif
+
+Windows can't run *.sh directly, so create a shim executable that can be.
+Without something executable, the next hg command will try to run `tl` instead
+of $tool (and fail).
+#if windows
+ $ cat > dir/tool.bat <<EOF
+ > @sh -c "`pwd`/dir/tool.sh %1 %2"
+ > EOF
+ $ tool=`pwd`/dir/tool.bat
+#else
$ tool=`pwd`/dir/tool.sh
+#endif
+
+ $ cat a
+ changed
+ edited
+ $ hg --debug tl --config extdiff.tl= --config merge-tools.tl.executable=$tool
+ making snapshot of 2 files from rev * (glob)
+ a
+ b
+ making snapshot of 2 files from working directory
+ a
+ b
+ running '$TESTTMP/a/dir/tool.bat a.* a' in */extdiff.* (glob) (windows !)
+ running '$TESTTMP/a/dir/tool.sh a.* a' in */extdiff.* (glob) (no-windows !)
+ ** custom diff **
+ file changed while diffing. Overwriting: $TESTTMP/a/a (src: */extdiff.*/a/a) (glob)
+ cleaning up temp directory
+ [1]
+ $ cat a
+ a
+
+#if execbit
+ $ [ -x a ]
+
+ $ cat > 'dir/tool.sh' << 'EOF'
+ > #!/bin/sh
+ > chmod -x $2/a
+ > echo "** custom diff **"
+ > EOF
+
$ hg --debug tl --config extdiff.tl= --config merge-tools.tl.executable=$tool
making snapshot of 2 files from rev * (glob)
a
@@ -366,12 +385,15 @@
b
running '$TESTTMP/a/dir/tool.sh a.* a' in */extdiff.* (glob)
** custom diff **
+ file changed while diffing. Overwriting: $TESTTMP/a/a (src: */extdiff.*/a/a) (glob)
cleaning up temp directory
[1]
- $ cd ..
+ $ [ -x a ]
+ [1]
+#endif
-#endif
+ $ cd ..
#if symlink
--- a/tests/test-extension.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-extension.t Tue Jun 20 16:33:46 2017 -0400
@@ -2,9 +2,9 @@
$ cat > foobar.py <<EOF
> import os
- > from mercurial import cmdutil, commands
+ > from mercurial import commands, registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> def uisetup(ui):
> ui.write("uisetup called\\n")
> ui.flush()
@@ -98,7 +98,7 @@
Check hgweb's load order:
$ cat > hgweb.cgi <<EOF
- > #!/usr/bin/env python
+ > #!$PYTHON
> from mercurial import demandimport; demandimport.enable()
> from mercurial.hgweb import hgweb
> from mercurial.hgweb import wsgicgi
@@ -380,9 +380,9 @@
$ cat > $TESTTMP/absextroot/__init__.py <<EOF
> from __future__ import absolute_import
- > from mercurial import cmdutil
+ > from mercurial import registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
>
> # "absolute" and "relative" shouldn't be imported before actual
> # command execution, because (1) they import same modules, and (2)
@@ -444,9 +444,9 @@
> EOF
$ cat > $TESTTMP/checkrelativity.py <<EOF
- > from mercurial import cmdutil
+ > from mercurial import registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
>
> # demand import avoids failure of importing notexist here
> import extlibroot.lsub1.lsub2.notexist
@@ -487,9 +487,9 @@
$ cat > debugextension.py <<EOF
> '''only debugcommands
> '''
- > from mercurial import cmdutil
+ > from mercurial import registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> @command('debugfoobar', [], 'hg debugfoobar')
> def debugfoobar(ui, repo, *args, **opts):
> "yet another debug command"
@@ -726,9 +726,9 @@
Test help topic with same name as extension
$ cat > multirevs.py <<EOF
- > from mercurial import cmdutil, commands
+ > from mercurial import commands, registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> """multirevs extension
> Big multi-line module docstring."""
> @command('multirevs', [], 'ARG', norepo=True)
@@ -803,9 +803,9 @@
> This is an awesome 'dodo' extension. It does nothing and
> writes 'Foo foo'
> """
- > from mercurial import cmdutil, commands
+ > from mercurial import commands, registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> @command('dodo', [], 'hg dodo')
> def dodo(ui, *args, **kwargs):
> """Does nothing"""
@@ -914,9 +914,9 @@
> This is an awesome 'dudu' extension. It does something and
> also writes 'Beep beep'
> """
- > from mercurial import cmdutil, commands
+ > from mercurial import commands, registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> @command('something', [], 'hg something')
> def something(ui, *args, **kwargs):
> """Does something"""
@@ -1157,9 +1157,9 @@
[255]
$ cat > throw.py <<EOF
- > from mercurial import cmdutil, commands, util
+ > from mercurial import commands, registrar, util
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> class Bogon(Exception): pass
> @command('throw', [], 'hg throw', norepo=True)
> def throw(ui, **opts):
@@ -1534,6 +1534,40 @@
$ cd ..
+Prohibit registration of commands that don't use @command (issue5137)
+
+ $ hg init deprecated
+ $ cd deprecated
+
+ $ cat <<EOF > deprecatedcmd.py
+ > def deprecatedcmd(repo, ui):
+ > pass
+ > cmdtable = {
+ > 'deprecatedcmd': (deprecatedcmd, [], ''),
+ > }
+ > EOF
+ $ cat <<EOF > .hg/hgrc
+ > [extensions]
+ > deprecatedcmd = `pwd`/deprecatedcmd.py
+ > mq = !
+ > hgext.mq = !
+ > hgext/mq = !
+ > EOF
+
+ $ hg deprecatedcmd > /dev/null
+ *** failed to import extension deprecatedcmd from $TESTTMP/deprecated/deprecatedcmd.py: missing attributes: norepo, optionalrepo, inferrepo
+ *** (use @command decorator to register 'deprecatedcmd')
+ hg: unknown command 'deprecatedcmd'
+ [255]
+
+ the extension shouldn't be loaded at all so the mq works:
+
+ $ hg qseries --config extensions.mq= > /dev/null
+ *** failed to import extension deprecatedcmd from $TESTTMP/deprecated/deprecatedcmd.py: missing attributes: norepo, optionalrepo, inferrepo
+ *** (use @command decorator to register 'deprecatedcmd')
+
+ $ cd ..
+
Test synopsis and docstring extending
$ hg init exthelp
@@ -1556,4 +1590,70 @@
$ hg help bookmarks | grep GREPME
hg bookmarks [OPTIONS]... [NAME]... GREPME [--foo] [-x]
GREPME make sure that this is in the help!
+ $ cd ..
+Show deprecation warning for the use of cmdutil.command
+
+ $ cat > nonregistrar.py <<EOF
+ > from mercurial import cmdutil
+ > cmdtable = {}
+ > command = cmdutil.command(cmdtable)
+ > @command('foo', [], norepo=True)
+ > def foo(ui):
+ > pass
+ > EOF
+
+ $ hg --config extensions.nonregistrar=`pwd`/nonregistrar.py version > /dev/null
+ devel-warn: cmdutil.command is deprecated, use registrar.command to register 'foo'
+ (compatibility will be dropped after Mercurial-4.6, update your code.) * (glob)
+
+Make sure a broken uisetup doesn't globally break hg:
+ $ cat > $TESTTMP/baduisetup.py <<EOF
+ > from mercurial import (
+ > bdiff,
+ > extensions,
+ > )
+ >
+ > def blockswrapper(orig, *args, **kwargs):
+ > return orig(*args, **kwargs)
+ >
+ > def uisetup(ui):
+ > extensions.wrapfunction(bdiff, 'blocks', blockswrapper)
+ > EOF
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > baduisetup = $TESTTMP/baduisetup.py
+ > EOF
+
+Even though the extension fails during uisetup, hg is still basically usable:
+ $ hg version
+ *** failed to set up extension baduisetup: No module named bdiff
+ Mercurial Distributed SCM (version *) (glob)
+ (see https://mercurial-scm.org for more information)
+
+ Copyright (C) 2005-2017 Matt Mackall and others
+ This is free software; see the source for copying conditions. There is NO
+ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+ $ hg version --traceback
+ Traceback (most recent call last):
+ File "*/mercurial/extensions.py", line *, in _runuisetup (glob)
+ uisetup(ui)
+ File "$TESTTMP/baduisetup.py", line 10, in uisetup
+ extensions.wrapfunction(bdiff, 'blocks', blockswrapper)
+ File "*/mercurial/extensions.py", line *, in wrapfunction (glob)
+ origfn = getattr(container, funcname)
+ File "*/hgdemandimport/demandimportpy2.py", line *, in __getattr__ (glob)
+ self._load()
+ File "*/hgdemandimport/demandimportpy2.py", line *, in _load (glob)
+ mod = _hgextimport(_import, head, globals, locals, None, level)
+ File "*/hgdemandimport/demandimportpy2.py", line *, in _hgextimport (glob)
+ return importfunc(name, globals, *args, **kwargs)
+ ImportError: No module named bdiff
+ *** failed to set up extension baduisetup: No module named bdiff
+ Mercurial Distributed SCM (version *) (glob)
+ (see https://mercurial-scm.org for more information)
+
+ Copyright (C) 2005-2017 Matt Mackall and others
+ This is free software; see the source for copying conditions. There is NO
+ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
--- a/tests/test-fileset-generated.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-fileset-generated.t Tue Jun 20 16:33:46 2017 -0400
@@ -2,15 +2,15 @@
Set up history and working copy
- $ python $TESTDIR/generate-working-copy-states.py state 2 1
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 2 1
$ hg addremove -q --similarity 0
$ hg commit -m first
- $ python $TESTDIR/generate-working-copy-states.py state 2 2
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 2 2
$ hg addremove -q --similarity 0
$ hg commit -m second
- $ python $TESTDIR/generate-working-copy-states.py state 2 wc
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 2 wc
$ hg addremove -q --similarity 0
$ hg forget *_*_*-untracked
$ rm *_*_missing-*
--- a/tests/test-flagprocessor.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-flagprocessor.t Tue Jun 20 16:33:46 2017 -0400
@@ -161,7 +161,8 @@
> EOF
$ echo 'this should fail' > file
$ hg commit -Aqm 'add file'
- abort: cannot register multiple processors on flag '0x8'.
+ *** failed to set up extension duplicate: cannot register multiple processors on flag '0x8'.
+ abort: missing processor for flag '0x1'!
[255]
$ cd ..
--- a/tests/test-gendoc.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-gendoc.t Tue Jun 20 16:33:46 2017 -0400
@@ -8,7 +8,7 @@
$ { echo C; ls "$TESTDIR/../i18n"/*.po | sort; } | while read PO; do
> LOCALE=`basename "$PO" .po`
> echo "% extracting documentation from $LOCALE"
- > LANGUAGE=$LOCALE python "$TESTDIR/../doc/gendoc.py" >> gendoc-$LOCALE.txt 2> /dev/null || exit
+ > LANGUAGE=$LOCALE $PYTHON "$TESTDIR/../doc/gendoc.py" >> gendoc-$LOCALE.txt 2> /dev/null || exit
>
> if [ $LOCALE != C ]; then
> if [ ! -f $TESTDIR/test-gendoc-$LOCALE.t ]; then
--- a/tests/test-glog.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-glog.t Tue Jun 20 16:33:46 2017 -0400
@@ -1513,7 +1513,7 @@
('symbol', 'date')
('string', '2 0 to 4 0')))
$ hg log -G -d 'brace ) in a date'
- abort: invalid date: 'brace ) in a date'
+ hg: parse error: invalid date: 'brace ) in a date'
[255]
$ testlog --prune 31 --prune 32
[]
--- a/tests/test-graft.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-graft.t Tue Jun 20 16:33:46 2017 -0400
@@ -177,6 +177,7 @@
b
committing manifest
committing changelog
+ updating the branch cache
grafting 5:97f8bfe72746 "5"
searching for copies back to rev 1
unmatched files in other (from topological common ancestor):
@@ -186,11 +187,11 @@
ancestor: 4c60f11aa304, local: 6b9e5368ca4e+, remote: 97f8bfe72746
e: remote is newer -> g
getting e
- b: remote unchanged -> k
committing files:
e
committing manifest
committing changelog
+ updating the branch cache
$ HGEDITOR=cat hg graft 4 3 --log --debug
scanning for duplicate grafts
grafting 4:9c233e8e184d "4"
@@ -203,7 +204,6 @@
preserving e for resolve of e
d: remote is newer -> g
getting d
- b: remote unchanged -> k
e: versions differ -> m (premerge)
picked tool ':merge' for e (binary False symlink False changedelete False)
merging e
--- a/tests/test-hardlinks-whitelisted.t Tue Jun 13 22:24:41 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,389 +0,0 @@
-#require hardlink
-#require hardlink-whitelisted
-
-This test is similar to test-hardlinks.t, but will only run on some filesystems
-that we are sure to have known good hardlink supports (see issue4546 for an
-example where the filesystem claims hardlink support but is actually
-problematic).
-
- $ cat > nlinks.py <<EOF
- > import sys
- > from mercurial import util
- > for f in sorted(sys.stdin.readlines()):
- > f = f[:-1]
- > print util.nlinks(f), f
- > EOF
-
- $ nlinksdir()
- > {
- > find $1 -type f | python $TESTTMP/nlinks.py
- > }
-
-Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux):
-
- $ cat > linkcp.py <<EOF
- > from mercurial import util
- > import sys
- > util.copyfiles(sys.argv[1], sys.argv[2], hardlink=True)
- > EOF
-
- $ linkcp()
- > {
- > python $TESTTMP/linkcp.py $1 $2
- > }
-
-Prepare repo r1:
-
- $ hg init r1
- $ cd r1
-
- $ echo c1 > f1
- $ hg add f1
- $ hg ci -m0
-
- $ mkdir d1
- $ cd d1
- $ echo c2 > f2
- $ hg add f2
- $ hg ci -m1
- $ cd ../..
-
- $ nlinksdir r1/.hg/store
- 1 r1/.hg/store/00changelog.i
- 1 r1/.hg/store/00manifest.i
- 1 r1/.hg/store/data/d1/f2.i
- 1 r1/.hg/store/data/f1.i
- 1 r1/.hg/store/fncache
- 1 r1/.hg/store/phaseroots
- 1 r1/.hg/store/undo
- 1 r1/.hg/store/undo.backup.fncache
- 1 r1/.hg/store/undo.backupfiles
- 1 r1/.hg/store/undo.phaseroots
-
-
-Create hardlinked clone r2:
-
- $ hg clone -U --debug r1 r2 --config progress.debug=true
- linking: 1
- linking: 2
- linking: 3
- linking: 4
- linking: 5
- linking: 6
- linking: 7
- linked 7 files
-
-Create non-hardlinked clone r3:
-
- $ hg clone --pull r1 r3
- requesting all changes
- adding changesets
- adding manifests
- adding file changes
- added 2 changesets with 2 changes to 2 files
- updating to branch default
- 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
-
-
-Repos r1 and r2 should now contain hardlinked files:
-
- $ nlinksdir r1/.hg/store
- 2 r1/.hg/store/00changelog.i
- 2 r1/.hg/store/00manifest.i
- 2 r1/.hg/store/data/d1/f2.i
- 2 r1/.hg/store/data/f1.i
- 2 r1/.hg/store/fncache
- 1 r1/.hg/store/phaseroots
- 1 r1/.hg/store/undo
- 1 r1/.hg/store/undo.backup.fncache
- 1 r1/.hg/store/undo.backupfiles
- 1 r1/.hg/store/undo.phaseroots
-
- $ nlinksdir r2/.hg/store
- 2 r2/.hg/store/00changelog.i
- 2 r2/.hg/store/00manifest.i
- 2 r2/.hg/store/data/d1/f2.i
- 2 r2/.hg/store/data/f1.i
- 2 r2/.hg/store/fncache
-
-Repo r3 should not be hardlinked:
-
- $ nlinksdir r3/.hg/store
- 1 r3/.hg/store/00changelog.i
- 1 r3/.hg/store/00manifest.i
- 1 r3/.hg/store/data/d1/f2.i
- 1 r3/.hg/store/data/f1.i
- 1 r3/.hg/store/fncache
- 1 r3/.hg/store/phaseroots
- 1 r3/.hg/store/undo
- 1 r3/.hg/store/undo.backupfiles
- 1 r3/.hg/store/undo.phaseroots
-
-
-Create a non-inlined filelog in r3:
-
- $ cd r3/d1
- >>> f = open('data1', 'wb')
- >>> for x in range(10000):
- ... f.write("%s\n" % str(x))
- >>> f.close()
- $ for j in 0 1 2 3 4 5 6 7 8 9; do
- > cat data1 >> f2
- > hg commit -m$j
- > done
- $ cd ../..
-
- $ nlinksdir r3/.hg/store
- 1 r3/.hg/store/00changelog.i
- 1 r3/.hg/store/00manifest.i
- 1 r3/.hg/store/data/d1/f2.d
- 1 r3/.hg/store/data/d1/f2.i
- 1 r3/.hg/store/data/f1.i
- 1 r3/.hg/store/fncache
- 1 r3/.hg/store/phaseroots
- 1 r3/.hg/store/undo
- 1 r3/.hg/store/undo.backup.fncache
- 1 r3/.hg/store/undo.backup.phaseroots
- 1 r3/.hg/store/undo.backupfiles
- 1 r3/.hg/store/undo.phaseroots
-
-Push to repo r1 should break up most hardlinks in r2:
-
- $ hg -R r2 verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- 2 files, 2 changesets, 2 total revisions
-
- $ cd r3
- $ hg push
- pushing to $TESTTMP/r1 (glob)
- searching for changes
- adding changesets
- adding manifests
- adding file changes
- added 10 changesets with 10 changes to 1 files
-
- $ cd ..
-
- $ nlinksdir r2/.hg/store
- 1 r2/.hg/store/00changelog.i
- 1 r2/.hg/store/00manifest.i
- 1 r2/.hg/store/data/d1/f2.i
- 2 r2/.hg/store/data/f1.i
- 2 r2/.hg/store/fncache
-
- $ hg -R r2 verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- 2 files, 2 changesets, 2 total revisions
-
-
- $ cd r1
- $ hg up
- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-
-Committing a change to f1 in r1 must break up hardlink f1.i in r2:
-
- $ echo c1c1 >> f1
- $ hg ci -m00
- $ cd ..
-
- $ nlinksdir r2/.hg/store
- 1 r2/.hg/store/00changelog.i
- 1 r2/.hg/store/00manifest.i
- 1 r2/.hg/store/data/d1/f2.i
- 1 r2/.hg/store/data/f1.i
- 2 r2/.hg/store/fncache
-
-
- $ cd r3
- $ hg tip --template '{rev}:{node|short}\n'
- 11:a6451b6bc41f
- $ echo bla > f1
- $ hg ci -m1
- $ cd ..
-
-Create hardlinked copy r4 of r3 (on Linux, we would call 'cp -al'):
-
- $ linkcp r3 r4
-
-r4 has hardlinks in the working dir (not just inside .hg):
-
- $ nlinksdir r4
- 2 r4/.hg/00changelog.i
- 2 r4/.hg/branch
- 2 r4/.hg/cache/branch2-served
- 2 r4/.hg/cache/checkisexec
- 3 r4/.hg/cache/checklink (?)
- ? r4/.hg/cache/checklink-target (glob)
- 2 r4/.hg/cache/checknoexec
- 2 r4/.hg/cache/rbc-names-v1
- 2 r4/.hg/cache/rbc-revs-v1
- 2 r4/.hg/dirstate
- 2 r4/.hg/hgrc
- 2 r4/.hg/last-message.txt
- 2 r4/.hg/requires
- 2 r4/.hg/store/00changelog.i
- 2 r4/.hg/store/00manifest.i
- 2 r4/.hg/store/data/d1/f2.d
- 2 r4/.hg/store/data/d1/f2.i
- 2 r4/.hg/store/data/f1.i
- 2 r4/.hg/store/fncache
- 2 r4/.hg/store/phaseroots
- 2 r4/.hg/store/undo
- 2 r4/.hg/store/undo.backup.fncache
- 2 r4/.hg/store/undo.backup.phaseroots
- 2 r4/.hg/store/undo.backupfiles
- 2 r4/.hg/store/undo.phaseroots
- 4 r4/.hg/undo.backup.dirstate
- 2 r4/.hg/undo.bookmarks
- 2 r4/.hg/undo.branch
- 2 r4/.hg/undo.desc
- 4 r4/.hg/undo.dirstate
- 2 r4/d1/data1
- 2 r4/d1/f2
- 2 r4/f1
-
-Update back to revision 11 in r4 should break hardlink of file f1:
-
- $ hg -R r4 up 11
- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-
- $ nlinksdir r4
- 2 r4/.hg/00changelog.i
- 1 r4/.hg/branch
- 2 r4/.hg/cache/branch2-served
- 2 r4/.hg/cache/checkisexec
- 2 r4/.hg/cache/checklink-target
- 2 r4/.hg/cache/checknoexec
- 2 r4/.hg/cache/rbc-names-v1
- 2 r4/.hg/cache/rbc-revs-v1
- 1 r4/.hg/dirstate
- 2 r4/.hg/hgrc
- 2 r4/.hg/last-message.txt
- 2 r4/.hg/requires
- 2 r4/.hg/store/00changelog.i
- 2 r4/.hg/store/00manifest.i
- 2 r4/.hg/store/data/d1/f2.d
- 2 r4/.hg/store/data/d1/f2.i
- 2 r4/.hg/store/data/f1.i
- 2 r4/.hg/store/fncache
- 2 r4/.hg/store/phaseroots
- 2 r4/.hg/store/undo
- 2 r4/.hg/store/undo.backup.fncache
- 2 r4/.hg/store/undo.backup.phaseroots
- 2 r4/.hg/store/undo.backupfiles
- 2 r4/.hg/store/undo.phaseroots
- 4 r4/.hg/undo.backup.dirstate
- 2 r4/.hg/undo.bookmarks
- 2 r4/.hg/undo.branch
- 2 r4/.hg/undo.desc
- 4 r4/.hg/undo.dirstate
- 2 r4/d1/data1
- 2 r4/d1/f2
- 1 r4/f1
-
-
-Test hardlinking outside hg:
-
- $ mkdir x
- $ echo foo > x/a
-
- $ linkcp x y
- $ echo bar >> y/a
-
-No diff if hardlink:
-
- $ diff x/a y/a
-
-Test mq hardlinking:
-
- $ echo "[extensions]" >> $HGRCPATH
- $ echo "mq=" >> $HGRCPATH
-
- $ hg init a
- $ cd a
-
- $ hg qimport -n foo - << EOF
- > # HG changeset patch
- > # Date 1 0
- > diff -r 2588a8b53d66 a
- > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
- > +++ b/a Wed Jul 23 15:54:29 2008 +0200
- > @@ -0,0 +1,1 @@
- > +a
- > EOF
- adding foo to series file
-
- $ hg qpush
- applying foo
- now at: foo
-
- $ cd ..
- $ linkcp a b
- $ cd b
-
- $ hg qimport -n bar - << EOF
- > # HG changeset patch
- > # Date 2 0
- > diff -r 2588a8b53d66 a
- > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
- > +++ b/b Wed Jul 23 15:54:29 2008 +0200
- > @@ -0,0 +1,1 @@
- > +b
- > EOF
- adding bar to series file
-
- $ hg qpush
- applying bar
- now at: bar
-
- $ cat .hg/patches/status
- 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
- 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar
-
- $ cat .hg/patches/series
- foo
- bar
-
- $ cat ../a/.hg/patches/status
- 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
-
- $ cat ../a/.hg/patches/series
- foo
-
-Test tags hardlinking:
-
- $ hg qdel -r qbase:qtip
- patch foo finalized without changeset message
- patch bar finalized without changeset message
-
- $ hg tag -l lfoo
- $ hg tag foo
-
- $ cd ..
- $ linkcp b c
- $ cd c
-
- $ hg tag -l -r 0 lbar
- $ hg tag -r 0 bar
-
- $ cat .hgtags
- 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
- 430ed4828a74fa4047bc816a25500f7472ab4bfe bar
-
- $ cat .hg/localtags
- 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
- 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar
-
- $ cat ../b/.hgtags
- 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
-
- $ cat ../b/.hg/localtags
- 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
-
- $ cd ..
--- a/tests/test-hardlinks.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-hardlinks.t Tue Jun 20 16:33:46 2017 -0400
@@ -10,7 +10,7 @@
$ nlinksdir()
> {
- > find $1 -type f | python $TESTTMP/nlinks.py
+ > find "$@" -type f | $PYTHON $TESTTMP/nlinks.py
> }
Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux):
@@ -23,7 +23,7 @@
$ linkcp()
> {
- > python $TESTTMP/linkcp.py $1 $2
+ > $PYTHON $TESTTMP/linkcp.py $1 $2
> }
Prepare repo r1:
@@ -168,6 +168,11 @@
2 r2/.hg/store/data/f1.i
[12] r2/\.hg/store/fncache (re)
+#if hardlink-whitelisted
+ $ nlinksdir r2/.hg/store/fncache
+ 2 r2/.hg/store/fncache
+#endif
+
$ hg -R r2 verify
checking changesets
checking manifests
@@ -193,11 +198,23 @@
1 r2/.hg/store/data/f1.i
[12] r2/\.hg/store/fncache (re)
+#if hardlink-whitelisted
+ $ nlinksdir r2/.hg/store/fncache
+ 2 r2/.hg/store/fncache
+#endif
+
+Create a file which exec permissions we will change
+ $ cd r3
+ $ echo "echo hello world" > f3
+ $ hg add f3
+ $ hg ci -mf3
+ $ cd ..
$ cd r3
$ hg tip --template '{rev}:{node|short}\n'
- 11:a6451b6bc41f
+ 12:d3b77733a28a
$ echo bla > f1
+ $ chmod +x f3
$ hg ci -m1
$ cd ..
@@ -205,14 +222,20 @@
$ linkcp r3 r4
+'checklink' is produced by hardlinking a symlink, which is undefined whether
+the symlink should be followed or not. It does behave differently on Linux and
+BSD. Just remove it so the test pass on both platforms.
+
+ $ rm -f r4/.hg/cache/checklink
+
r4 has hardlinks in the working dir (not just inside .hg):
$ nlinksdir r4
2 r4/.hg/00changelog.i
2 r4/.hg/branch
+ 2 r4/.hg/cache/branch2-base
2 r4/.hg/cache/branch2-served
2 r4/.hg/cache/checkisexec (execbit !)
- 3 r4/.hg/cache/checklink (?)
? r4/.hg/cache/checklink-target (glob) (symlink !)
2 r4/.hg/cache/checknoexec (execbit !)
2 r4/.hg/cache/rbc-names-v1
@@ -226,6 +249,7 @@
2 r4/.hg/store/data/d1/f2.d
2 r4/.hg/store/data/d1/f2.i
2 r4/.hg/store/data/f1.i
+ 2 r4/.hg/store/data/f3.i
2 r4/.hg/store/fncache
2 r4/.hg/store/phaseroots
2 r4/.hg/store/undo
@@ -241,15 +265,24 @@
2 r4/d1/data1
2 r4/d1/f2
2 r4/f1
-
-Update back to revision 11 in r4 should break hardlink of file f1:
+ 2 r4/f3
- $ hg -R r4 up 11
- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+Update back to revision 12 in r4 should break hardlink of file f1 and f3:
+#if hardlink-whitelisted
+ $ nlinksdir r4/.hg/undo.backup.dirstate r4/.hg/undo.dirstate
+ 4 r4/.hg/undo.backup.dirstate
+ 4 r4/.hg/undo.dirstate
+#endif
+
+
+ $ hg -R r4 up 12
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (execbit !)
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-execbit !)
$ nlinksdir r4
2 r4/.hg/00changelog.i
1 r4/.hg/branch
+ 2 r4/.hg/cache/branch2-base
2 r4/.hg/cache/branch2-served
2 r4/.hg/cache/checkisexec (execbit !)
2 r4/.hg/cache/checklink-target (symlink !)
@@ -265,6 +298,7 @@
2 r4/.hg/store/data/d1/f2.d
2 r4/.hg/store/data/d1/f2.i
2 r4/.hg/store/data/f1.i
+ 2 r4/.hg/store/data/f3.i
2 r4/.hg/store/fncache
2 r4/.hg/store/phaseroots
2 r4/.hg/store/undo
@@ -280,7 +314,14 @@
2 r4/d1/data1
2 r4/d1/f2
1 r4/f1
+ 1 r4/f3 (execbit !)
+ 2 r4/f3 (no-execbit !)
+#if hardlink-whitelisted
+ $ nlinksdir r4/.hg/undo.backup.dirstate r4/.hg/undo.dirstate
+ 4 r4/.hg/undo.backup.dirstate
+ 4 r4/.hg/undo.dirstate
+#endif
Test hardlinking outside hg:
--- a/tests/test-help.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-help.t Tue Jun 20 16:33:46 2017 -0400
@@ -680,26 +680,26 @@
$ cat > helpext.py <<EOF
> import os
- > from mercurial import cmdutil, commands
+ > from mercurial import commands, registrar
>
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
>
- > @command('nohelp',
- > [('', 'longdesc', 3, 'x'*90),
- > ('n', '', None, 'normal desc'),
- > ('', 'newline', '', 'line1\nline2')],
- > 'hg nohelp',
+ > @command(b'nohelp',
+ > [(b'', b'longdesc', 3, b'x'*90),
+ > (b'n', b'', None, b'normal desc'),
+ > (b'', b'newline', b'', b'line1\nline2')],
+ > b'hg nohelp',
> norepo=True)
- > @command('debugoptADV', [('', 'aopt', None, 'option is (ADVANCED)')])
- > @command('debugoptDEP', [('', 'dopt', None, 'option is (DEPRECATED)')])
- > @command('debugoptEXP', [('', 'eopt', None, 'option is (EXPERIMENTAL)')])
+ > @command(b'debugoptADV', [(b'', b'aopt', None, b'option is (ADVANCED)')])
+ > @command(b'debugoptDEP', [(b'', b'dopt', None, b'option is (DEPRECATED)')])
+ > @command(b'debugoptEXP', [(b'', b'eopt', None, b'option is (EXPERIMENTAL)')])
> def nohelp(ui, *args, **kwargs):
> pass
>
> def uisetup(ui):
- > ui.setconfig('alias', 'shellalias', '!echo hi', 'helpext')
- > ui.setconfig('alias', 'hgalias', 'summary', 'helpext')
+ > ui.setconfig(b'alias', b'shellalias', b'!echo hi', b'helpext')
+ > ui.setconfig(b'alias', b'hgalias', b'summary', b'helpext')
>
> EOF
$ echo '[extensions]' >> $HGRCPATH
@@ -912,6 +912,8 @@
debugoptEXP (no help text available)
debugpathcomplete
complete part or all of a tracked path
+ debugpickmergetool
+ examine which merge tool is chosen for specified file
debugpushkey access the pushkey key/value protocol
debugpvec (no help text available)
debugrebuilddirstate
@@ -929,6 +931,8 @@
show set of successors for revision
debugtemplate
parse and apply a template
+ debugupdatecaches
+ warm all known caches in the repository
debugupgraderepo
upgrade a repository to use different features
debugwalk show how files match on given patterns
@@ -1760,11 +1764,18 @@
accordingly be named "a.txt.local", "a.txt.other" and "a.txt.base" and
they will be placed in the same directory as "a.txt".
+ This implies permerge. Therefore, files aren't dumped, if premerge runs
+ successfully. Use :forcedump to forcibly write files out.
+
":fail"
Rather than attempting to merge files that were modified on both
branches, it marks them as unresolved. The resolve command must be used
to resolve these conflicts.
+ ":forcedump"
+ Creates three versions of the files as same as :dump, but omits
+ premerge.
+
":local"
Uses the local 'p1()' version of files as the merged version.
@@ -1856,7 +1867,7 @@
Test usage of section marks in help documents
$ cd "$TESTDIR"/../doc
- $ python check-seclevel.py
+ $ $PYTHON check-seclevel.py
$ cd $TESTTMP
#if serve
@@ -1904,9 +1915,10 @@
<div class="main">
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -2498,7 +2510,7 @@
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -2678,7 +2690,7 @@
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -2879,7 +2891,7 @@
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -2982,9 +2994,10 @@
<div class="main">
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -3089,7 +3102,7 @@
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
--- a/tests/test-hghave.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-hghave.t Tue Jun 20 16:33:46 2017 -0400
@@ -20,7 +20,7 @@
> EOF
$ run-tests.py $HGTEST_RUN_TESTS_PURE test-hghaveaddon.t
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
(invocation via command line)
--- a/tests/test-hgignore.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-hgignore.t Tue Jun 20 16:33:46 2017 -0400
@@ -1,6 +1,10 @@
$ hg init ignorerepo
$ cd ignorerepo
+debugignore with no hgignore should be deterministic:
+ $ hg debugignore
+ <nevermatcher>
+
Issue562: .hgignore requires newline at end:
$ touch foo
@@ -15,7 +19,7 @@
> f.close()
> EOF
- $ python makeignore.py
+ $ $PYTHON makeignore.py
Should display baz only:
@@ -164,7 +168,7 @@
A b.o
$ hg debugignore
- (?:(?:|.*/)[^/]*(?:/|$))
+ <includematcher includes='(?:(?:|.*/)[^/]*(?:/|$))'>
$ hg debugignore b.o
b.o is ignored
--- a/tests/test-hgweb-commands.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-hgweb-commands.t Tue Jun 20 16:33:46 2017 -0400
@@ -747,6 +747,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>log</h3>
+
<form class="search" action="/log">
<p><input name="rev" id="search1" type="text" size="30" value="" /></p>
@@ -882,9 +883,10 @@
<span class="tag">1.0</span> <span class="tag">anotherthing</span>
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -1025,9 +1027,10 @@
</p>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" value="base"></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="base" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -1312,9 +1315,10 @@
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -1440,9 +1444,10 @@
<span class="branchname">stable</span>
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -1528,15 +1533,10 @@
<div class="page_header">
<a href="https://mercurial-scm.org/" title="Mercurial" style="float: right;">Mercurial</a>
<a href="/">Mercurial</a> / summary
- <form action="/log">
- <input type="hidden" name="style" value="gitweb" />
- <div class="search">
- <input type="text" name="rev" />
- </div>
- </form>
</div>
<div class="page_nav">
+ <div>
summary |
<a href="/shortlog?style=gitweb">shortlog</a> |
<a href="/log?style=gitweb">changelog</a> |
@@ -1546,7 +1546,16 @@
<a href="/branches?style=gitweb">branches</a> |
<a href="/file?style=gitweb">files</a> |
<a href="/help?style=gitweb">help</a>
- <br/>
+ </div>
+
+ <div class="search">
+ <form id="searchform" action="/log">
+ <input type="hidden" name="style" value="gitweb" />
+ <input name="rev" type="text" value="" size="40" />
+ <div id="hint">Find changesets by keywords (author, files, the commit message), revision
+ number or hash, or <a href="/help/revsets">revset expression</a>.</div>
+ </form>
+ </div>
</div>
<div class="title"> </div>
@@ -1729,13 +1738,8 @@
<a href="/">Mercurial</a> / graph
</div>
- <form action="/log">
- <input type="hidden" name="style" value="gitweb" />
- <div class="search">
- <input type="text" name="rev" />
- </div>
- </form>
<div class="page_nav">
+ <div>
<a href="/summary?style=gitweb">summary</a> |
<a href="/shortlog/tip?style=gitweb">shortlog</a> |
<a href="/log/tip?style=gitweb">changelog</a> |
@@ -1748,7 +1752,17 @@
<br/>
<a href="/graph/tip?revcount=30&style=gitweb">less</a>
<a href="/graph/tip?revcount=120&style=gitweb">more</a>
- | <a href="/graph/2ef0ac749a14?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a> <br/>
+ | <a href="/graph/2ef0ac749a14?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a>
+ </div>
+
+ <div class="search">
+ <form id="searchform" action="/log">
+ <input type="hidden" name="style" value="gitweb" />
+ <input name="rev" type="text" value="" size="40" />
+ <div id="hint">Find changesets by keywords (author, files, the commit message), revision
+ number or hash, or <a href="/help/revsets">revset expression</a>.</div>
+ </form>
+ </div>
</div>
<div class="title"> </div>
--- a/tests/test-hgweb-descend-empties.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-hgweb-descend-empties.t Tue Jun 20 16:33:46 2017 -0400
@@ -76,9 +76,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -195,9 +196,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
<input type="hidden" name="style" value="coal" />
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -287,13 +289,14 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> / files</h1>
- <form action="/log">
- <input type="hidden" name="style" value="monoblue" />
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+
+ <form action="/log">
+ <input type="hidden" name="style" value="monoblue" />
+ <dl class="search">
+ <dt><label>Search: </label></dt>
+ <dd><input type="text" name="rev" value="" /></dd>
+ </dl>
+ </form>
<ul class="page-nav">
<li><a href="/summary?style=monoblue">summary</a></li>
@@ -405,6 +408,7 @@
</div>
<div class="page_nav">
+ <div>
<a href="/summary?style=gitweb">summary</a> |
<a href="/shortlog?style=gitweb">shortlog</a> |
<a href="/log?style=gitweb">changelog</a> |
@@ -415,7 +419,16 @@
files |
<a href="/rev/tip?style=gitweb">changeset</a> |
<a href="/help?style=gitweb">help</a>
- <br/>
+ </div>
+
+ <div class="search">
+ <form id="searchform" action="/log">
+ <input type="hidden" name="style" value="gitweb" />
+ <input name="rev" type="text" value="" size="40" />
+ <div id="hint">Find changesets by keywords (author, files, the commit message), revision
+ number or hash, or <a href="/help/revsets">revset expression</a>.</div>
+ </form>
+ </div>
</div>
<div class="title">/ <span class="logtags"><span class="branchtag" title="default">default</span> <span class="tagtag" title="tip">tip</span> </span></div>
--- a/tests/test-hgweb-diffs.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-hgweb-diffs.t Tue Jun 20 16:33:46 2017 -0400
@@ -84,9 +84,10 @@
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -256,9 +257,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p></p>
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -360,9 +362,10 @@
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -536,9 +539,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p></p>
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -641,9 +645,10 @@
</h3>
+
<form class="search" action="/log">
- <p></p>
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -770,9 +775,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p></p>
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -901,9 +907,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p></p>
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -1038,9 +1045,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p></p>
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
--- a/tests/test-hgweb-empty.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-hgweb-empty.t Tue Jun 20 16:33:46 2017 -0400
@@ -59,6 +59,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>log</h3>
+
<form class="search" action="/log">
<p><input name="rev" id="search1" type="text" size="30" value="" /></p>
@@ -169,6 +170,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>log</h3>
+
<form class="search" action="/log">
<p><input name="rev" id="search1" type="text" size="30" value="" /></p>
@@ -275,9 +277,10 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>graph</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -424,9 +427,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
--- a/tests/test-hgweb-filelog.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-hgweb-filelog.t Tue Jun 20 16:33:46 2017 -0400
@@ -193,9 +193,10 @@
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -315,9 +316,10 @@
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -437,9 +439,10 @@
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -550,9 +553,10 @@
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -639,9 +643,10 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>error</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30"></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -739,9 +744,10 @@
(following lines 1:2 <a href="/log/tip/c">back to filelog</a>)
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -858,9 +864,10 @@
(following lines 1:2 <a href="/log/tip/c?revcount=1">back to filelog</a>)
</h3>
+
<form class="search" action="/log">
<input type="hidden" name="revcount" value="1" />
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -1094,9 +1101,10 @@
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -1375,9 +1383,10 @@
(following lines 3:4 <a href="/log/tip/c">back to filelog</a>)
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -1631,9 +1640,10 @@
(following lines 3:4, descending <a href="/log/8/c">back to filelog</a>)
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
--- a/tests/test-hgweb-no-path-info.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-hgweb-no-path-info.t Tue Jun 20 16:33:46 2017 -0400
@@ -70,7 +70,7 @@
> env['QUERY_STRING'] = 'style=raw'
> process(hgwebdir({'repo': '.'}))
> EOF
- $ python request.py
+ $ $PYTHON request.py
---- STATUS
200 Script output follows
---- HEADERS
--- a/tests/test-hgweb-no-request-uri.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-hgweb-no-request-uri.t Tue Jun 20 16:33:46 2017 -0400
@@ -81,7 +81,7 @@
> env['QUERY_STRING'] = 'style=raw'
> process(hgwebdir({'repo': '.'}))
> EOF
- $ python request.py
+ $ $PYTHON request.py
---- STATUS
200 Script output follows
---- HEADERS
--- a/tests/test-hgweb-non-interactive.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-hgweb-non-interactive.t Tue Jun 20 16:33:46 2017 -0400
@@ -76,7 +76,7 @@
> with i._obtainrepo() as repo:
> print sorted([x for x in repo.ui.environ if x.startswith('wsgi')])
> EOF
- $ python request.py
+ $ $PYTHON request.py
---- STATUS
200 Script output follows
---- HEADERS
--- a/tests/test-hgweb-removed.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-hgweb-removed.t Tue Jun 20 16:33:46 2017 -0400
@@ -65,9 +65,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -192,9 +193,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p></p>
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
--- a/tests/test-hgweb-symrev.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-hgweb-symrev.t Tue Jun 20 16:33:46 2017 -0400
@@ -482,7 +482,7 @@
<a href="/file/tip?style=gitweb">files</a> |
<a href="/graph/tip?revcount=30&style=gitweb">less</a>
<a href="/graph/tip?revcount=120&style=gitweb">more</a>
- | <a href="/graph/43c799df6e75?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a> <br/>
+ | <a href="/graph/43c799df6e75?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a>
<a href="/graph/tip?revcount=30&style=gitweb">less</a>
<a href="/graph/tip?revcount=120&style=gitweb">more</a>
| <a href="/graph/43c799df6e75?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a>
@@ -572,7 +572,7 @@
<a href="/file/xyzzy?style=gitweb">files</a> |
<a href="/graph/xyzzy?revcount=30&style=gitweb">less</a>
<a href="/graph/xyzzy?revcount=120&style=gitweb">more</a>
- | <a href="/graph/43c799df6e75?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a> <br/>
+ | <a href="/graph/43c799df6e75?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a>
<a href="/graph/xyzzy?revcount=30&style=gitweb">less</a>
<a href="/graph/xyzzy?revcount=120&style=gitweb">more</a>
| <a href="/graph/43c799df6e75?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a>
--- a/tests/test-hgweb.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-hgweb.t Tue Jun 20 16:33:46 2017 -0400
@@ -81,9 +81,10 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>error</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30"></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -188,9 +189,10 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>error</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30"></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -268,9 +270,10 @@
<span class="tag">tip</span> <span class="tag">@</span> <span class="tag">a b c</span> <span class="tag">d/e/f</span>
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -337,7 +340,7 @@
$ get-with-headers.py --twice localhost:$HGPORT 'static/style-gitweb.css' - date etag server
200 Script output follows
- content-length: 8012
+ content-length: 8265
content-type: text/css
body { font-family: sans-serif; font-size: 12px; border:solid #d9d8d1; border-width:1px; margin:10px; background: white; color: black; }
@@ -346,7 +349,12 @@
div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; }
div.page_header a:visited { color:#0000cc; }
div.page_header a:hover { color:#880000; }
- div.page_nav { padding:8px; }
+ div.page_nav {
+ padding:8px;
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ }
div.page_nav a:visited { color:#0000cc; }
div.page_path { padding:8px; border:solid #d9d8d1; border-width:0px 0px 1px}
div.page_footer { padding:4px 8px; background-color: #d9d8d1; }
@@ -394,7 +402,23 @@
div.pre { font-family:monospace; font-size:12px; white-space:pre; }
div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; }
div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; }
- div.search { margin:4px 8px; position:absolute; top:56px; right:12px }
+
+ .search {
+ margin-right: 8px;
+ }
+
+ div#hint {
+ position: absolute;
+ display: none;
+ width: 250px;
+ padding: 5px;
+ background: #ffc;
+ border: 1px solid yellow;
+ border-radius: 5px;
+ }
+
+ #searchform:hover div#hint { display: block; }
+
tr.thisrev a { color:#999999; text-decoration: none; }
tr.thisrev pre { color:#009900; }
td.annotate {
--- a/tests/test-highlight.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-highlight.t Tue Jun 20 16:33:46 2017 -0400
@@ -20,8 +20,6 @@
create random Python file to exercise Pygments
$ cat <<EOF > primes.py
- > #!/usr/bin/env python
- >
> """Fun with generators. Corresponding Haskell implementation:
>
> primes = 2 : sieve [3, 5..]
@@ -76,7 +74,7 @@
<script type="text/javascript" src="/static/mercurial.js"></script>
<link rel="stylesheet" href="/highlightcss" type="text/css" />
- <title>test: 06824edf55d0 primes.py</title>
+ <title>test: 1af356141006 primes.py</title>
</head>
<body>
@@ -114,13 +112,14 @@
<div class="main">
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
- view primes.py @ 0:<a href="/rev/06824edf55d0">06824edf55d0</a>
+ view primes.py @ 0:<a href="/rev/1af356141006">1af356141006</a>
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -150,39 +149,37 @@
<div class="sourcefirst linewraptoggle">line wrap: <a class="linewraplink" href="javascript:toggleLinewrap()">on</a></div>
<div class="sourcefirst"> line source</div>
<pre class="sourcelines stripes4 wrap bottomline" data-logurl="/log/tip/primes.py" data-ishead="1">
- <span id="l1"><span class="c">#!/usr/bin/env python</span></span><a href="#l1"></a>
+ <span id="l1"><span class="sd">"""Fun with generators. Corresponding Haskell implementation:</span></span><a href="#l1"></a>
<span id="l2"></span><a href="#l2"></a>
- <span id="l3"><span class="sd">"""Fun with generators. Corresponding Haskell implementation:</span></span><a href="#l3"></a>
- <span id="l4"></span><a href="#l4"></a>
- <span id="l5"><span class="sd">primes = 2 : sieve [3, 5..]</span></span><a href="#l5"></a>
- <span id="l6"><span class="sd"> where sieve (p:ns) = p : sieve [n | n <- ns, mod n p /= 0]</span></span><a href="#l6"></a>
- <span id="l7"><span class="sd">"""</span></span><a href="#l7"></a>
+ <span id="l3"><span class="sd">primes = 2 : sieve [3, 5..]</span></span><a href="#l3"></a>
+ <span id="l4"><span class="sd"> where sieve (p:ns) = p : sieve [n | n <- ns, mod n p /= 0]</span></span><a href="#l4"></a>
+ <span id="l5"><span class="sd">"""</span></span><a href="#l5"></a>
+ <span id="l6"></span><a href="#l6"></a>
+ <span id="l7"><span class="kn">from</span> <span class="nn">itertools</span> <span class="kn">import</span> <span class="n">dropwhile</span><span class="p">,</span> <span class="n">ifilter</span><span class="p">,</span> <span class="n">islice</span><span class="p">,</span> <span class="n">count</span><span class="p">,</span> <span class="n">chain</span></span><a href="#l7"></a>
<span id="l8"></span><a href="#l8"></a>
- <span id="l9"><span class="kn">from</span> <span class="nn">itertools</span> <span class="kn">import</span> <span class="n">dropwhile</span><span class="p">,</span> <span class="n">ifilter</span><span class="p">,</span> <span class="n">islice</span><span class="p">,</span> <span class="n">count</span><span class="p">,</span> <span class="n">chain</span></span><a href="#l9"></a>
- <span id="l10"></span><a href="#l10"></a>
- <span id="l11"><span class="kn">def</span> <span class="nf">primes</span><span class="p">():</span></span><a href="#l11"></a>
- <span id="l12"> <span class="sd">"""Generate all primes."""</span></span><a href="#l12"></a>
- <span id="l13"> <span class="kn">def</span> <span class="nf">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></span><a href="#l13"></a>
- <span id="l14"> <span class="n">p</span> <span class="o">=</span> <span class="n">ns</span><span class="o">.</span><span class="n">next</span><span class="p">()</span></span><a href="#l14"></a>
- <span id="l15"> <span class="c"># It is important to yield *here* in order to stop the</span></span><a href="#l15"></a>
- <span id="l16"> <span class="c"># infinite recursion.</span></span><a href="#l16"></a>
- <span id="l17"> <span class="kn">yield</span> <span class="n">p</span></span><a href="#l17"></a>
- <span id="l18"> <span class="n">ns</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o">%</span> <span class="n">p</span> <span class="o">!=</span> <span class="mi">0</span><span class="p">,</span> <span class="n">ns</span><span class="p">)</span></span><a href="#l18"></a>
- <span id="l19"> <span class="kn">for</span> <span class="n">n</span> <span class="ow">in</span> <span class="n">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></span><a href="#l19"></a>
- <span id="l20"> <span class="kn">yield</span> <span class="n">n</span></span><a href="#l20"></a>
- <span id="l21"></span><a href="#l21"></a>
- <span id="l22"> <span class="n">odds</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">i</span><span class="p">:</span> <span class="n">i</span> <span class="o">%</span> <span class="mi">2</span> <span class="o">==</span> <span class="mi">1</span><span class="p">,</span> <span class="n">count</span><span class="p">())</span></span><a href="#l22"></a>
- <span id="l23"> <span class="kn">return</span> <span class="n">chain</span><span class="p">([</span><span class="mi">2</span><span class="p">],</span> <span class="n">sieve</span><span class="p">(</span><span class="n">dropwhile</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o"><</span> <span class="mi">3</span><span class="p">,</span> <span class="n">odds</span><span class="p">)))</span></span><a href="#l23"></a>
- <span id="l24"></span><a href="#l24"></a>
- <span id="l25"><span class="kn">if</span> <span class="n">__name__</span> <span class="o">==</span> <span class="s">"__main__"</span><span class="p">:</span></span><a href="#l25"></a>
- <span id="l26"> <span class="kn">import</span> <span class="nn">sys</span></span><a href="#l26"></a>
- <span id="l27"> <span class="kn">try</span><span class="p">:</span></span><a href="#l27"></a>
- <span id="l28"> <span class="n">n</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mi">1</span><span class="p">])</span></span><a href="#l28"></a>
- <span id="l29"> <span class="kn">except</span> <span class="p">(</span><span class="ne">ValueError</span><span class="p">,</span> <span class="ne">IndexError</span><span class="p">):</span></span><a href="#l29"></a>
- <span id="l30"> <span class="n">n</span> <span class="o">=</span> <span class="mi">10</span></span><a href="#l30"></a>
- <span id="l31"> <span class="n">p</span> <span class="o">=</span> <span class="n">primes</span><span class="p">()</span></span><a href="#l31"></a>
- <span id="l32"> <span class="kn">print</span> <span class="s">"The first </span><span class="si">%d</span><span class="s"> primes: </span><span class="si">%s</span><span class="s">"</span> <span class="o">%</span> <span class="p">(</span><span class="n">n</span><span class="p">,</span> <span class="nb">list</span><span class="p">(</span><span class="n">islice</span><span class="p">(</span><span class="n">p</span><span class="p">,</span> <span class="n">n</span><span class="p">)))</span></span><a href="#l32"></a>
- <span id="l33"></span><a href="#l33"></a></pre>
+ <span id="l9"><span class="kn">def</span> <span class="nf">primes</span><span class="p">():</span></span><a href="#l9"></a>
+ <span id="l10"> <span class="sd">"""Generate all primes."""</span></span><a href="#l10"></a>
+ <span id="l11"> <span class="kn">def</span> <span class="nf">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></span><a href="#l11"></a>
+ <span id="l12"> <span class="n">p</span> <span class="o">=</span> <span class="n">ns</span><span class="o">.</span><span class="n">next</span><span class="p">()</span></span><a href="#l12"></a>
+ <span id="l13"> <span class="c"># It is important to yield *here* in order to stop the</span></span><a href="#l13"></a>
+ <span id="l14"> <span class="c"># infinite recursion.</span></span><a href="#l14"></a>
+ <span id="l15"> <span class="kn">yield</span> <span class="n">p</span></span><a href="#l15"></a>
+ <span id="l16"> <span class="n">ns</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o">%</span> <span class="n">p</span> <span class="o">!=</span> <span class="mi">0</span><span class="p">,</span> <span class="n">ns</span><span class="p">)</span></span><a href="#l16"></a>
+ <span id="l17"> <span class="kn">for</span> <span class="n">n</span> <span class="ow">in</span> <span class="n">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></span><a href="#l17"></a>
+ <span id="l18"> <span class="kn">yield</span> <span class="n">n</span></span><a href="#l18"></a>
+ <span id="l19"></span><a href="#l19"></a>
+ <span id="l20"> <span class="n">odds</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">i</span><span class="p">:</span> <span class="n">i</span> <span class="o">%</span> <span class="mi">2</span> <span class="o">==</span> <span class="mi">1</span><span class="p">,</span> <span class="n">count</span><span class="p">())</span></span><a href="#l20"></a>
+ <span id="l21"> <span class="kn">return</span> <span class="n">chain</span><span class="p">([</span><span class="mi">2</span><span class="p">],</span> <span class="n">sieve</span><span class="p">(</span><span class="n">dropwhile</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o"><</span> <span class="mi">3</span><span class="p">,</span> <span class="n">odds</span><span class="p">)))</span></span><a href="#l21"></a>
+ <span id="l22"></span><a href="#l22"></a>
+ <span id="l23"><span class="kn">if</span> <span class="n">__name__</span> <span class="o">==</span> <span class="s">"__main__"</span><span class="p">:</span></span><a href="#l23"></a>
+ <span id="l24"> <span class="kn">import</span> <span class="nn">sys</span></span><a href="#l24"></a>
+ <span id="l25"> <span class="kn">try</span><span class="p">:</span></span><a href="#l25"></a>
+ <span id="l26"> <span class="n">n</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mi">1</span><span class="p">])</span></span><a href="#l26"></a>
+ <span id="l27"> <span class="kn">except</span> <span class="p">(</span><span class="ne">ValueError</span><span class="p">,</span> <span class="ne">IndexError</span><span class="p">):</span></span><a href="#l27"></a>
+ <span id="l28"> <span class="n">n</span> <span class="o">=</span> <span class="mi">10</span></span><a href="#l28"></a>
+ <span id="l29"> <span class="n">p</span> <span class="o">=</span> <span class="n">primes</span><span class="p">()</span></span><a href="#l29"></a>
+ <span id="l30"> <span class="kn">print</span> <span class="s">"The first </span><span class="si">%d</span><span class="s"> primes: </span><span class="si">%s</span><span class="s">"</span> <span class="o">%</span> <span class="p">(</span><span class="n">n</span><span class="p">,</span> <span class="nb">list</span><span class="p">(</span><span class="n">islice</span><span class="p">(</span><span class="n">p</span><span class="p">,</span> <span class="n">n</span><span class="p">)))</span></span><a href="#l30"></a>
+ <span id="l31"></span><a href="#l31"></a></pre>
</div>
<script type="text/javascript" src="/static/followlines.js"></script>
@@ -249,13 +246,14 @@
<div class="main">
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
- annotate primes.py @ 0:<a href="/rev/06824edf55d0">06824edf55d0</a>
+ annotate primes.py @ 0:<a href="/rev/1af356141006">1af356141006</a>
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -293,36 +291,36 @@
<tr id="l1" class="thisrev">
<td class="annotate parity0">
- <a href="/annotate/06824edf55d0/primes.py#l1">
+ <a href="/annotate/1af356141006/primes.py#l1">
0
</a>
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l1">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l1">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l1"> 1</a> <span class="c">#!/usr/bin/env python</span></td>
+ <td class="source"><a href="#l1"> 1</a> <span class="sd">"""Fun with generators. Corresponding Haskell implementation:</span></td>
</tr>
<tr id="l2" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l2">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l2">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
<td class="source"><a href="#l2"> 2</a> </td>
@@ -332,99 +330,99 @@
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l3">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l3">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l3"> 3</a> <span class="sd">"""Fun with generators. Corresponding Haskell implementation:</span></td>
+ <td class="source"><a href="#l3"> 3</a> <span class="sd">primes = 2 : sieve [3, 5..]</span></td>
</tr>
<tr id="l4" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l4">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l4">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l4"> 4</a> </td>
+ <td class="source"><a href="#l4"> 4</a> <span class="sd"> where sieve (p:ns) = p : sieve [n | n <- ns, mod n p /= 0]</span></td>
</tr>
<tr id="l5" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l5">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l5">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l5"> 5</a> <span class="sd">primes = 2 : sieve [3, 5..]</span></td>
+ <td class="source"><a href="#l5"> 5</a> <span class="sd">"""</span></td>
</tr>
<tr id="l6" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l6">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l6">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l6"> 6</a> <span class="sd"> where sieve (p:ns) = p : sieve [n | n <- ns, mod n p /= 0]</span></td>
+ <td class="source"><a href="#l6"> 6</a> </td>
</tr>
<tr id="l7" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l7">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l7">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l7"> 7</a> <span class="sd">"""</span></td>
+ <td class="source"><a href="#l7"> 7</a> <span class="kn">from</span> <span class="nn">itertools</span> <span class="kn">import</span> <span class="n">dropwhile</span><span class="p">,</span> <span class="n">ifilter</span><span class="p">,</span> <span class="n">islice</span><span class="p">,</span> <span class="n">count</span><span class="p">,</span> <span class="n">chain</span></td>
</tr>
<tr id="l8" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l8">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l8">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
<td class="source"><a href="#l8"> 8</a> </td>
@@ -434,425 +432,391 @@
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l9">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l9">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l9"> 9</a> <span class="kn">from</span> <span class="nn">itertools</span> <span class="kn">import</span> <span class="n">dropwhile</span><span class="p">,</span> <span class="n">ifilter</span><span class="p">,</span> <span class="n">islice</span><span class="p">,</span> <span class="n">count</span><span class="p">,</span> <span class="n">chain</span></td>
+ <td class="source"><a href="#l9"> 9</a> <span class="kn">def</span> <span class="nf">primes</span><span class="p">():</span></td>
</tr>
<tr id="l10" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l10">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l10">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l10"> 10</a> </td>
+ <td class="source"><a href="#l10"> 10</a> <span class="sd">"""Generate all primes."""</span></td>
</tr>
<tr id="l11" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l11">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l11">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l11"> 11</a> <span class="kn">def</span> <span class="nf">primes</span><span class="p">():</span></td>
+ <td class="source"><a href="#l11"> 11</a> <span class="kn">def</span> <span class="nf">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></td>
</tr>
<tr id="l12" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l12">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l12">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l12"> 12</a> <span class="sd">"""Generate all primes."""</span></td>
+ <td class="source"><a href="#l12"> 12</a> <span class="n">p</span> <span class="o">=</span> <span class="n">ns</span><span class="o">.</span><span class="n">next</span><span class="p">()</span></td>
</tr>
<tr id="l13" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l13">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l13">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l13"> 13</a> <span class="kn">def</span> <span class="nf">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></td>
+ <td class="source"><a href="#l13"> 13</a> <span class="c"># It is important to yield *here* in order to stop the</span></td>
</tr>
<tr id="l14" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l14">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l14">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l14"> 14</a> <span class="n">p</span> <span class="o">=</span> <span class="n">ns</span><span class="o">.</span><span class="n">next</span><span class="p">()</span></td>
+ <td class="source"><a href="#l14"> 14</a> <span class="c"># infinite recursion.</span></td>
</tr>
<tr id="l15" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l15">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l15">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l15"> 15</a> <span class="c"># It is important to yield *here* in order to stop the</span></td>
+ <td class="source"><a href="#l15"> 15</a> <span class="kn">yield</span> <span class="n">p</span></td>
</tr>
<tr id="l16" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l16">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l16">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l16"> 16</a> <span class="c"># infinite recursion.</span></td>
+ <td class="source"><a href="#l16"> 16</a> <span class="n">ns</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o">%</span> <span class="n">p</span> <span class="o">!=</span> <span class="mi">0</span><span class="p">,</span> <span class="n">ns</span><span class="p">)</span></td>
</tr>
<tr id="l17" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l17">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l17">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l17"> 17</a> <span class="kn">yield</span> <span class="n">p</span></td>
+ <td class="source"><a href="#l17"> 17</a> <span class="kn">for</span> <span class="n">n</span> <span class="ow">in</span> <span class="n">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></td>
</tr>
<tr id="l18" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l18">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l18">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l18"> 18</a> <span class="n">ns</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o">%</span> <span class="n">p</span> <span class="o">!=</span> <span class="mi">0</span><span class="p">,</span> <span class="n">ns</span><span class="p">)</span></td>
+ <td class="source"><a href="#l18"> 18</a> <span class="kn">yield</span> <span class="n">n</span></td>
</tr>
<tr id="l19" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l19">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l19">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l19"> 19</a> <span class="kn">for</span> <span class="n">n</span> <span class="ow">in</span> <span class="n">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></td>
+ <td class="source"><a href="#l19"> 19</a> </td>
</tr>
<tr id="l20" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l20">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l20">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l20"> 20</a> <span class="kn">yield</span> <span class="n">n</span></td>
+ <td class="source"><a href="#l20"> 20</a> <span class="n">odds</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">i</span><span class="p">:</span> <span class="n">i</span> <span class="o">%</span> <span class="mi">2</span> <span class="o">==</span> <span class="mi">1</span><span class="p">,</span> <span class="n">count</span><span class="p">())</span></td>
</tr>
<tr id="l21" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l21">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l21">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l21"> 21</a> </td>
+ <td class="source"><a href="#l21"> 21</a> <span class="kn">return</span> <span class="n">chain</span><span class="p">([</span><span class="mi">2</span><span class="p">],</span> <span class="n">sieve</span><span class="p">(</span><span class="n">dropwhile</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o"><</span> <span class="mi">3</span><span class="p">,</span> <span class="n">odds</span><span class="p">)))</span></td>
</tr>
<tr id="l22" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l22">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l22">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l22"> 22</a> <span class="n">odds</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">i</span><span class="p">:</span> <span class="n">i</span> <span class="o">%</span> <span class="mi">2</span> <span class="o">==</span> <span class="mi">1</span><span class="p">,</span> <span class="n">count</span><span class="p">())</span></td>
+ <td class="source"><a href="#l22"> 22</a> </td>
</tr>
<tr id="l23" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l23">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l23">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l23"> 23</a> <span class="kn">return</span> <span class="n">chain</span><span class="p">([</span><span class="mi">2</span><span class="p">],</span> <span class="n">sieve</span><span class="p">(</span><span class="n">dropwhile</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o"><</span> <span class="mi">3</span><span class="p">,</span> <span class="n">odds</span><span class="p">)))</span></td>
+ <td class="source"><a href="#l23"> 23</a> <span class="kn">if</span> <span class="n">__name__</span> <span class="o">==</span> <span class="s">"__main__"</span><span class="p">:</span></td>
</tr>
<tr id="l24" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l24">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l24">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l24"> 24</a> </td>
+ <td class="source"><a href="#l24"> 24</a> <span class="kn">import</span> <span class="nn">sys</span></td>
</tr>
<tr id="l25" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l25">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l25">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l25"> 25</a> <span class="kn">if</span> <span class="n">__name__</span> <span class="o">==</span> <span class="s">"__main__"</span><span class="p">:</span></td>
+ <td class="source"><a href="#l25"> 25</a> <span class="kn">try</span><span class="p">:</span></td>
</tr>
<tr id="l26" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l26">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l26">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l26"> 26</a> <span class="kn">import</span> <span class="nn">sys</span></td>
+ <td class="source"><a href="#l26"> 26</a> <span class="n">n</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mi">1</span><span class="p">])</span></td>
</tr>
<tr id="l27" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l27">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l27">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l27"> 27</a> <span class="kn">try</span><span class="p">:</span></td>
+ <td class="source"><a href="#l27"> 27</a> <span class="kn">except</span> <span class="p">(</span><span class="ne">ValueError</span><span class="p">,</span> <span class="ne">IndexError</span><span class="p">):</span></td>
</tr>
<tr id="l28" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l28">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l28">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l28"> 28</a> <span class="n">n</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mi">1</span><span class="p">])</span></td>
+ <td class="source"><a href="#l28"> 28</a> <span class="n">n</span> <span class="o">=</span> <span class="mi">10</span></td>
</tr>
<tr id="l29" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l29">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l29">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l29"> 29</a> <span class="kn">except</span> <span class="p">(</span><span class="ne">ValueError</span><span class="p">,</span> <span class="ne">IndexError</span><span class="p">):</span></td>
+ <td class="source"><a href="#l29"> 29</a> <span class="n">p</span> <span class="o">=</span> <span class="n">primes</span><span class="p">()</span></td>
</tr>
<tr id="l30" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l30">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l30">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l30"> 30</a> <span class="n">n</span> <span class="o">=</span> <span class="mi">10</span></td>
+ <td class="source"><a href="#l30"> 30</a> <span class="kn">print</span> <span class="s">"The first </span><span class="si">%d</span><span class="s"> primes: </span><span class="si">%s</span><span class="s">"</span> <span class="o">%</span> <span class="p">(</span><span class="n">n</span><span class="p">,</span> <span class="nb">list</span><span class="p">(</span><span class="n">islice</span><span class="p">(</span><span class="n">p</span><span class="p">,</span> <span class="n">n</span><span class="p">)))</span></td>
</tr>
<tr id="l31" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l31">
- 06824edf55d0</a>
- a
- </div>
- <div><em>test</em></div>
- <div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
- </div>
- </td>
- <td class="source"><a href="#l31"> 31</a> <span class="n">p</span> <span class="o">=</span> <span class="n">primes</span><span class="p">()</span></td>
- </tr>
- <tr id="l32" class="thisrev">
- <td class="annotate parity0">
-
- <div class="annotate-info">
- <div>
- <a href="/annotate/06824edf55d0/primes.py#l32">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l31">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l32"> 32</a> <span class="kn">print</span> <span class="s">"The first </span><span class="si">%d</span><span class="s"> primes: </span><span class="si">%s</span><span class="s">"</span> <span class="o">%</span> <span class="p">(</span><span class="n">n</span><span class="p">,</span> <span class="nb">list</span><span class="p">(</span><span class="n">islice</span><span class="p">(</span><span class="n">p</span><span class="p">,</span> <span class="n">n</span><span class="p">)))</span></td>
- </tr>
- <tr id="l33" class="thisrev">
- <td class="annotate parity0">
-
- <div class="annotate-info">
- <div>
- <a href="/annotate/06824edf55d0/primes.py#l33">
- 06824edf55d0</a>
- a
- </div>
- <div><em>test</em></div>
- <div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
- </div>
- </td>
- <td class="source"><a href="#l33"> 33</a> </td>
+ <td class="source"><a href="#l31"> 31</a> </td>
</tr>
</tbody>
</table>
@@ -946,7 +910,7 @@
test that fileset in highlightfiles works and primes.py is not highlighted
$ get-with-headers.py localhost:$HGPORT 'file/tip/primes.py' | grep 'id="l11"'
- <span id="l11">def primes():</span><a href="#l11"></a>
+ <span id="l11"> def sieve(ns):</span><a href="#l11"></a>
errors encountered
@@ -992,7 +956,7 @@
> EOF
$ cat > unknownfile << EOF
- > #!/usr/bin/python
+ > #!$PYTHON
> def foo():
> pass
> EOF
--- a/tests/test-histedit-fold.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-histedit-fold.t Tue Jun 20 16:33:46 2017 -0400
@@ -154,7 +154,7 @@
> from mercurial import util
> def abortfolding(ui, repo, hooktype, **kwargs):
> ctx = repo[kwargs.get('node')]
- > if set(ctx.files()) == set(['c', 'd', 'f']):
+ > if set(ctx.files()) == {'c', 'd', 'f'}:
> return True # abort folding commit only
> ui.warn('allow non-folding commit\\n')
> EOF
--- a/tests/test-histedit-obsolete.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-histedit-obsolete.t Tue Jun 20 16:33:46 2017 -0400
@@ -545,7 +545,7 @@
|
o 0:cb9a9f314b8b (public) a
- $ hg histedit -r 'b449568bf7fc' --commands - << EOF
+ $ hg histedit -r 'b449568bf7fc' --commands - << EOF --config experimental.evolution.track-operation=1
> pick b449568bf7fc 13 f
> pick 7395e1ff83bd 15 h
> pick 6b70183d2492 14 g
@@ -556,7 +556,7 @@
Editing (ee118ab9fa44), you may commit or record as needed now.
(hg histedit --continue to resume)
[1]
- $ hg histedit --continue
+ $ hg histedit --continue --config experimental.evolution.track-operation=1
$ hg log -G
@ 23:175d6b286a22 (secret) k
|
@@ -574,3 +574,5 @@
|
o 0:cb9a9f314b8b (public) a
+ $ hg debugobsolete --rev .
+ ee118ab9fa44ebb86be85996548b5517a39e5093 175d6b286a224c23f192e79a581ce83131a53fa2 0 (*) {'operation': 'histedit', 'user': 'test'} (glob)
--- a/tests/test-hook.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-hook.t Tue Jun 20 16:33:46 2017 -0400
@@ -99,9 +99,9 @@
abort: pre-identify hook exited with status 1
[255]
$ hg cat b
- pre-cat hook: HG_ARGS=cat b HG_HOOKNAME=pre-cat HG_HOOKTYPE=pre-cat HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b']
+ pre-cat hook: HG_ARGS=cat b HG_HOOKNAME=pre-cat HG_HOOKTYPE=pre-cat HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': '', 'template': ''} HG_PATS=['b']
b
- post-cat hook: HG_ARGS=cat b HG_HOOKNAME=post-cat HG_HOOKTYPE=post-cat HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0
+ post-cat hook: HG_ARGS=cat b HG_HOOKNAME=post-cat HG_HOOKTYPE=post-cat HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': '', 'template': ''} HG_PATS=['b'] HG_RESULT=0
$ cd ../b
$ hg pull ../a
@@ -648,6 +648,7 @@
foo
committing manifest
committing changelog
+ updating the branch cache
committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
calling hook commit.auto: hgext_hookext.autohook
Automatically installed hook
--- a/tests/test-http-bad-server.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-http-bad-server.t Tue Jun 20 16:33:46 2017 -0400
@@ -71,13 +71,13 @@
TODO this error message is not very good
$ hg clone http://localhost:$HGPORT/ clone
- abort: error: (''|) (re)
+ abort: error: ''
[255]
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\(1 from (-1|65537)\) -> \(1\) G (re)
+ readline(1 from 65537) -> (1) G
read limit reached; closing socket
$ rm -f error.log
@@ -87,13 +87,13 @@
$ hg --config badserver.closeafterrecvbytes=40 serve -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
- abort: error: (''|) (re)
+ abort: error: ''
[255]
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\(40 from (-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(40 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(7 from -1) -> (7) Accept-
read limit reached; closing socket
@@ -104,13 +104,13 @@
$ hg --config badserver.closeafterrecvbytes=210 serve -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
- abort: error: (''|) (re)
+ abort: error: ''
[255]
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\(210 from (-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(210 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(177 from -1) -> (27) Accept-Encoding: identity\r\n
readline(150 from -1) -> (35) accept: application/mercurial-0.1\r\n
readline(115 from -1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -123,10 +123,10 @@
write(21) -> Content-Length: 405\r\n
write(2) -> \r\n
write(405) -> lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
- readline\(4[12] from (-1|65537)\) -> \(26\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline(4? from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n (glob)
readline(1? from -1) -> (1?) Accept-Encoding* (glob)
read limit reached; closing socket
- readline\(210 from (-1|65537)\) -> \(26\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline(210 from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(184 from -1) -> (27) Accept-Encoding: identity\r\n
readline(157 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(128 from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
@@ -143,13 +143,13 @@
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
requesting all changes
- abort: error: (''|) (re)
+ abort: error: ''
[255]
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\(292 from (-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(292 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(259 from -1) -> (27) Accept-Encoding: identity\r\n
readline(232 from -1) -> (35) accept: application/mercurial-0.1\r\n
readline(197 from -1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -162,13 +162,13 @@
write(21) -> Content-Length: 405\r\n
write(2) -> \r\n
write(405) -> lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
- readline\(12[34] from (-1|65537)\) -> \(2[67]\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline\(12[34] from 65537\) -> \(2[67]\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
readline(9? from -1) -> (27) Accept-Encoding: identity\r\n (glob)
readline(7? from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
readline(4? from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob)
readline(1 from -1) -> (1) x (?)
read limit reached; closing socket
- readline\(292 from (-1|65537)\) -> \(26\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline(292 from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(266 from -1) -> (27) Accept-Encoding: identity\r\n
readline(239 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(210 from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
@@ -184,9 +184,9 @@
write(20) -> Content-Length: 42\r\n
write(2) -> \r\n
write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
- readline\(1[23] from (-1|65537)\) -> \(1[23]\) GET /\?cmd=ge.? (re)
+ readline\(1[23] from 65537\) -> \(1[23]\) GET /\?cmd=ge.? (re)
read limit reached; closing socket
- readline\(292 from (-1|65537)\) -> \(30\) GET /\?cmd=getbundle HTTP/1.1\\r\\n (re)
+ readline(292 from 65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
readline(262 from -1) -> (27) Accept-Encoding: identity\r\n
readline(235 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(206 from -1) -> (206) x-hgarg-1: bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Ali
@@ -200,13 +200,13 @@
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
- abort: error: (''|) (re)
+ abort: error: ''
[255]
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\(315 from (-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(315 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(282 from -1) -> (27) Accept-Encoding: identity\r\n
readline(255 from -1) -> (35) accept: application/mercurial-0.1\r\n
readline(220 from -1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -219,14 +219,14 @@
write(21) -> Content-Length: 418\r\n
write(2) -> \r\n
write(418) -> lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httppostargs httpmediatype=0.1rx,0.1tx,0.2tx compression=none
- readline\(14[67] from (-1|65537)\) -> \(2[67]\) POST /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline\(14[67] from 65537\) -> \(2[67]\) POST /\?cmd=batch HTTP/1.1\\r\\n (re)
readline\(1(19|20) from -1\) -> \(27\) Accept-Encoding: identity\\r\\n (re)
readline(9? from -1) -> (41) content-type: application/mercurial-0.1\r\n (glob)
readline(5? from -1) -> (19) vary: X-HgProto-1\r\n (glob)
readline(3? from -1) -> (19) x-hgargs-post: 28\r\n (glob)
readline(1? from -1) -> (1?) x-hgproto-1: * (glob)
read limit reached; closing socket
- readline\(315 from (-1|65537)\) -> \(27\) POST /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline(315 from 65537) -> (27) POST /?cmd=batch HTTP/1.1\r\n
readline(288 from -1) -> (27) Accept-Encoding: identity\r\n
readline(261 from -1) -> (41) content-type: application/mercurial-0.1\r\n
readline(220 from -1) -> (19) vary: X-HgProto-1\r\n
@@ -257,7 +257,7 @@
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\((-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -282,7 +282,7 @@
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\((-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -317,7 +317,7 @@
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\((-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -330,7 +330,7 @@
write(21 from 21) -> (537) Content-Length: 405\r\n
write(2 from 2) -> (535) \r\n
write(405 from 405) -> (130) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
- readline\((-1|65537)\) -> \(26\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
@@ -360,7 +360,7 @@
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\((-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -373,7 +373,7 @@
write(21 from 21) -> (602) Content-Length: 405\r\n
write(2 from 2) -> (600) \r\n
write(405 from 405) -> (195) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
- readline\((-1|65537)\) -> \(26\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
@@ -412,7 +412,7 @@
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\((-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -425,7 +425,7 @@
write(21 from 21) -> (737) Content-Length: 405\r\n
write(2 from 2) -> (735) \r\n
write(405 from 405) -> (330) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
- readline\((-1|65537)\) -> \(26\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
@@ -441,7 +441,7 @@
write(20 from 20) -> (173) Content-Length: 42\r\n
write(2 from 2) -> (171) \r\n
write(42 from 42) -> (129) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
- readline\((-1|65537)\) -> \(30\) GET /\?cmd=getbundle HTTP/1.1\\r\\n (re)
+ readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (396) x-hgarg-1: bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n
@@ -473,7 +473,7 @@
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\((-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -486,7 +486,7 @@
write(21 from 21) -> (775) Content-Length: 405\r\n
write(2 from 2) -> (773) \r\n
write(405 from 405) -> (368) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
- readline\((-1|65537)\) -> \(26\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
@@ -502,7 +502,7 @@
write(20 from 20) -> (211) Content-Length: 42\r\n
write(2 from 2) -> (209) \r\n
write(42 from 42) -> (167) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
- readline\((-1|65537)\) -> \(30\) GET /\?cmd=getbundle HTTP/1.1\\r\\n (re)
+ readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (396) x-hgarg-1: bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n
@@ -536,7 +536,7 @@
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\((-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -549,7 +549,7 @@
write(21 from 21) -> (787) Content-Length: 405\r\n
write(2 from 2) -> (785) \r\n
write(405 from 405) -> (380) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
- readline\((-1|65537)\) -> \(26\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
@@ -565,7 +565,7 @@
write(20 from 20) -> (223) Content-Length: 42\r\n
write(2 from 2) -> (221) \r\n
write(42 from 42) -> (179) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
- readline\((-1|65537)\) -> \(30\) GET /\?cmd=getbundle HTTP/1.1\\r\\n (re)
+ readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (396) x-hgarg-1: bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n
--- a/tests/test-http-bundle1.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-http-bundle1.t Tue Jun 20 16:33:46 2017 -0400
@@ -58,6 +58,26 @@
try to clone via stream, should use pull instead
$ hg clone --uncompressed http://localhost:$HGPORT1/ copy2
+ warning: stream clone requested but server has them disabled
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 4 changes to 4 files
+ updating to branch default
+ 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+try to clone via stream but missing requirements, so should use pull instead
+
+ $ cat > $TESTTMP/removesupportedformat.py << EOF
+ > from mercurial import localrepo
+ > def extsetup(ui):
+ > localrepo.localrepository.supportedformats.remove('generaldelta')
+ > EOF
+
+ $ hg clone --config extensions.rsf=$TESTTMP/removesupportedformat.py --uncompressed http://localhost:$HGPORT/ copy3
+ warning: stream clone requested but client is missing requirements: generaldelta
+ (see https://www.mercurial-scm.org/wiki/MissingRequirement for more information)
requesting all changes
adding changesets
adding manifests
@@ -345,3 +365,41 @@
this is an exercise
[255]
$ cat error.log
+
+disable pull-based clones
+
+ $ hg -R test serve -p $HGPORT1 -d --pid-file=hg4.pid -E error.log --config server.disablefullbundle=True
+ $ cat hg4.pid >> $DAEMON_PIDS
+ $ hg clone http://localhost:$HGPORT1/ disable-pull-clone
+ requesting all changes
+ abort: remote error:
+ server has pull-based clones disabled
+ [255]
+
+... but keep stream clones working
+
+ $ hg clone --uncompressed --noupdate http://localhost:$HGPORT1/ test-stream-clone
+ streaming all changes
+ * files to transfer, * of data (glob)
+ transferred * in * seconds (* KB/sec) (glob)
+ searching for changes
+ no changes found
+
+... and also keep partial clones and pulls working
+ $ hg clone http://localhost:$HGPORT1 --rev 0 test-partial-clone
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 4 changes to 4 files
+ updating to branch default
+ 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg pull -R test-partial-clone
+ pulling from http://localhost:$HGPORT1/
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 3 changes to 3 files
+ (run 'hg update' to get a working copy)
+
+ $ cat error.log
--- a/tests/test-http-proxy.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-http-proxy.t Tue Jun 20 16:33:46 2017 -0400
@@ -8,7 +8,7 @@
$ hg serve --config server.uncompressed=True -p $HGPORT -d --pid-file=hg.pid
$ cat hg.pid >> $DAEMON_PIDS
$ cd ..
- $ tinyproxy.py $HGPORT1 localhost >proxy.log 2>&1 </dev/null &
+ $ tinyproxy.py $HGPORT1 localhost 2>proxy.log >/dev/null </dev/null &
$ while [ ! -f proxy.pid ]; do sleep 0; done
$ cat proxy.pid >> $DAEMON_PIDS
--- a/tests/test-http.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-http.t Tue Jun 20 16:33:46 2017 -0400
@@ -49,6 +49,26 @@
try to clone via stream, should use pull instead
$ hg clone --uncompressed http://localhost:$HGPORT1/ copy2
+ warning: stream clone requested but server has them disabled
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 4 changes to 4 files
+ updating to branch default
+ 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+try to clone via stream but missing requirements, so should use pull instead
+
+ $ cat > $TESTTMP/removesupportedformat.py << EOF
+ > from mercurial import localrepo
+ > def extsetup(ui):
+ > localrepo.localrepository.supportedformats.remove('generaldelta')
+ > EOF
+
+ $ hg clone --config extensions.rsf=$TESTTMP/removesupportedformat.py --uncompressed http://localhost:$HGPORT/ copy3
+ warning: stream clone requested but client is missing requirements: generaldelta
+ (see https://www.mercurial-scm.org/wiki/MissingRequirement for more information)
requesting all changes
adding changesets
adding manifests
@@ -334,6 +354,44 @@
[255]
$ cat error.log
+disable pull-based clones
+
+ $ hg -R test serve -p $HGPORT1 -d --pid-file=hg4.pid -E error.log --config server.disablefullbundle=True
+ $ cat hg4.pid >> $DAEMON_PIDS
+ $ hg clone http://localhost:$HGPORT1/ disable-pull-clone
+ requesting all changes
+ remote: abort: server has pull-based clones disabled
+ abort: pull failed on remote
+ (remove --pull if specified or upgrade Mercurial)
+ [255]
+
+... but keep stream clones working
+
+ $ hg clone --uncompressed --noupdate http://localhost:$HGPORT1/ test-stream-clone
+ streaming all changes
+ * files to transfer, * of data (glob)
+ transferred * in * seconds (*/sec) (glob)
+ searching for changes
+ no changes found
+ $ cat error.log
+
+... and also keep partial clones and pulls working
+ $ hg clone http://localhost:$HGPORT1 --rev 0 test-partial-clone
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 4 changes to 4 files
+ updating to branch default
+ 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg pull -R test-partial-clone
+ pulling from http://localhost:$HGPORT1/
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 3 changes to 3 files
+ (run 'hg update' to get a working copy)
+
corrupt cookies file should yield a warning
$ cat > $TESTTMP/cookies.txt << EOF
--- a/tests/test-https.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-https.t Tue Jun 20 16:33:46 2017 -0400
@@ -333,20 +333,9 @@
> --config hostsecurity.disabletls10warning=true
5fed3813f7f5
-#if no-sslcontext no-py27+
-Setting ciphers doesn't work in Python 2.6
- $ P="$CERTSDIR" hg --config hostsecurity.ciphers=HIGH -R copy-pull id https://localhost:$HGPORT/
- warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info
- abort: setting ciphers in [hostsecurity] is not supported by this version of Python
- (remove the config option or run Mercurial with a modern Python version (preferred))
- [255]
-#endif
+Error message for setting ciphers is different depending on SSLContext support
-Setting ciphers works in Python 2.7+ but the error message is different on
-legacy ssl. We test legacy once and do more feature checking on modern
-configs.
-
-#if py27+ no-sslcontext
+#if no-sslcontext
$ P="$CERTSDIR" hg --config hostsecurity.ciphers=invalid -R copy-pull id https://localhost:$HGPORT/
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info
abort: *No cipher can be selected. (glob)
@@ -383,7 +372,7 @@
- works without cacerts (hostfingerprints)
$ hg -R copy-pull id https://localhost:$HGPORT/ --insecure --config hostfingerprints.localhost=ec:d8:7c:d6:b3:86:d0:4f:c1:b8:b4:1c:9d:8f:5e:16:8e:ef:1c:03
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
- (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, set the following config value in [hostsecurity] and remove the old one from [hostfingerprints] to upgrade to a more secure SHA-256 fingerprint: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
+ (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, remove the old SHA-1 fingerprint from [hostfingerprints] and add the following entry to the new [hostsecurity] section: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
5fed3813f7f5
- works without cacerts (hostsecurity)
@@ -398,7 +387,7 @@
- multiple fingerprints specified and first matches
$ hg --config 'hostfingerprints.localhost=ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03, deadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/ --insecure
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
- (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, set the following config value in [hostsecurity] and remove the old one from [hostfingerprints] to upgrade to a more secure SHA-256 fingerprint: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
+ (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, remove the old SHA-1 fingerprint from [hostfingerprints] and add the following entry to the new [hostsecurity] section: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
5fed3813f7f5
$ hg --config 'hostsecurity.localhost:fingerprints=sha1:ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03, sha1:deadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/
@@ -408,7 +397,7 @@
- multiple fingerprints specified and last matches
$ hg --config 'hostfingerprints.localhost=deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03' -R copy-pull id https://localhost:$HGPORT/ --insecure
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
- (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, set the following config value in [hostsecurity] and remove the old one from [hostfingerprints] to upgrade to a more secure SHA-256 fingerprint: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
+ (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, remove the old SHA-1 fingerprint from [hostfingerprints] and add the following entry to the new [hostsecurity] section: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
5fed3813f7f5
$ hg --config 'hostsecurity.localhost:fingerprints=sha1:deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, sha1:ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03' -R copy-pull id https://localhost:$HGPORT/
@@ -440,7 +429,7 @@
- ignores that certificate doesn't match hostname
$ hg -R copy-pull id https://$LOCALIP:$HGPORT/ --config hostfingerprints.$LOCALIP=ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03
warning: connecting to $LOCALIP using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
- (SHA-1 fingerprint for $LOCALIP found in legacy [hostfingerprints] section; if you trust this fingerprint, set the following config value in [hostsecurity] and remove the old one from [hostfingerprints] to upgrade to a more secure SHA-256 fingerprint: $LOCALIP:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
+ (SHA-1 fingerprint for $LOCALIP found in legacy [hostfingerprints] section; if you trust this fingerprint, remove the old SHA-1 fingerprint from [hostfingerprints] and add the following entry to the new [hostsecurity] section: $LOCALIP:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
5fed3813f7f5
Ports used by next test. Kill servers.
@@ -579,7 +568,7 @@
$ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull https://localhost:$HGPORT/ --config hostfingerprints.localhost=ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03 --trace
pulling from https://*:$HGPORT/ (glob)
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
- (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, set the following config value in [hostsecurity] and remove the old one from [hostfingerprints] to upgrade to a more secure SHA-256 fingerprint: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
+ (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, remove the old SHA-1 fingerprint from [hostfingerprints] and add the following entry to the new [hostsecurity] section: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
searching for changes
no changes found
--- a/tests/test-i18n.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-i18n.t Tue Jun 20 16:33:46 2017 -0400
@@ -45,6 +45,6 @@
tool itself by doctest
$ cd "$TESTDIR"/../i18n
- $ python check-translation.py *.po
- $ python check-translation.py --doctest
+ $ $PYTHON check-translation.py *.po
+ $ $PYTHON check-translation.py --doctest
$ cd $TESTTMP
--- a/tests/test-impexp-branch.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-impexp-branch.t Tue Jun 20 16:33:46 2017 -0400
@@ -30,12 +30,12 @@
$ hg export 1 > ../r1.patch
$ cd ..
- $ if python findbranch.py < r0.patch; then
+ $ if $PYTHON findbranch.py < r0.patch; then
> echo "Export of default branch revision has Branch header" 1>&2
> exit 1
> fi
- $ if python findbranch.py < r1.patch; then
+ $ if $PYTHON findbranch.py < r1.patch; then
> : # Do nothing
> else
> echo "Export of branch revision is missing Branch header" 1>&2
--- a/tests/test-import-context.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-import-context.t Tue Jun 20 16:33:46 2017 -0400
@@ -26,10 +26,10 @@
$ hg init repo
$ cd repo
- $ python ../writepatterns.py a 0 5A 1B 5C 1D
- $ python ../writepatterns.py b 1 1A 1B
- $ python ../writepatterns.py c 1 5A
- $ python ../writepatterns.py d 1 5A 1B
+ $ $PYTHON ../writepatterns.py a 0 5A 1B 5C 1D
+ $ $PYTHON ../writepatterns.py b 1 1A 1B
+ $ $PYTHON ../writepatterns.py c 1 5A
+ $ $PYTHON ../writepatterns.py d 1 5A 1B
$ hg add
adding a
adding b
@@ -114,13 +114,13 @@
What's in a
- $ python ../cat.py a
+ $ $PYTHON ../cat.py a
'A\nA\nA\nA\nA\nE\nC\nC\nC\nC\nC\nF\nF\n'
- $ python ../cat.py newnoeol
+ $ $PYTHON ../cat.py newnoeol
'a\nb'
- $ python ../cat.py c
+ $ $PYTHON ../cat.py c
'A\nA\nA\nA\nA\nB\nB\n'
- $ python ../cat.py d
+ $ $PYTHON ../cat.py d
'A\nA\nA\nA\n'
$ cd ..
--- a/tests/test-import-eol.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-import-eol.t Tue Jun 20 16:33:46 2017 -0400
@@ -29,7 +29,7 @@
$ hg ci -Am adda
adding .hgignore
adding a
- $ python ../makepatch.py
+ $ $PYTHON ../makepatch.py
invalid eol
--- a/tests/test-import.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-import.t Tue Jun 20 16:33:46 2017 -0400
@@ -64,7 +64,7 @@
added 1 changesets with 2 changes to 2 files
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ HGEDITOR=cat hg --config ui.patch='python ../dummypatch.py' --cwd b import --edit ../exported-tip.patch
+ $ HGEDITOR=cat hg --config ui.patch='$PYTHON ../dummypatch.py' --cwd b import --edit ../exported-tip.patch
applying ../exported-tip.patch
second change
@@ -294,7 +294,7 @@
added 1 changesets with 2 changes to 2 files
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ python mkmsg.py diffed-tip.patch msg.patch
+ $ $PYTHON mkmsg.py diffed-tip.patch msg.patch
$ hg --cwd b import ../msg.patch
applying ../msg.patch
$ hg --cwd b tip | grep email
@@ -356,7 +356,7 @@
added 1 changesets with 2 changes to 2 files
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ python mkmsg.py exported-tip.patch msg.patch
+ $ $PYTHON mkmsg.py exported-tip.patch msg.patch
$ cat msg.patch | hg --cwd b import -
applying patch from stdin
$ hg --cwd b tip | grep second
@@ -387,7 +387,7 @@
added 1 changesets with 2 changes to 2 files
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ python mkmsg2.py diffed-tip.patch msg.patch
+ $ $PYTHON mkmsg2.py diffed-tip.patch msg.patch
$ cat msg.patch | hg --cwd b import -
applying patch from stdin
$ hg --cwd b tip --template '{desc}\n'
--- a/tests/test-imports-checker.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-imports-checker.t Tue Jun 20 16:33:46 2017 -0400
@@ -124,7 +124,7 @@
> from mercurial.node import hex
> EOF
- $ python "$import_checker" testpackage*/*.py testpackage/subpackage/*.py
+ $ $PYTHON "$import_checker" testpackage*/*.py testpackage/subpackage/*.py
testpackage/importalias.py:2: ui module must be "as" aliased to uimod
testpackage/importfromalias.py:2: ui from testpackage must be "as" aliased to uimod
testpackage/importfromrelative.py:2: import should be relative: testpackage.unsorted
--- a/tests/test-inherit-mode.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-inherit-mode.t Tue Jun 20 16:33:46 2017 -0400
@@ -45,7 +45,7 @@
store can be written by the group, other files cannot
store is setgid
- $ python ../printmodes.py .
+ $ $PYTHON ../printmodes.py .
00700 ./.hg/
00600 ./.hg/00changelog.i
00600 ./.hg/requires
@@ -61,7 +61,7 @@
(in particular, store/**, dirstate, branch cache file, undo files)
new directories are setgid
- $ python ../printmodes.py .
+ $ $PYTHON ../printmodes.py .
00700 ./.hg/
00600 ./.hg/00changelog.i
00770 ./.hg/cache/
@@ -98,7 +98,7 @@
before push
group can write everything
- $ python ../printmodes.py ../push
+ $ $PYTHON ../printmodes.py ../push
00770 ../push/.hg/
00660 ../push/.hg/00changelog.i
00660 ../push/.hg/requires
@@ -110,13 +110,11 @@
after push
group can still write everything
- $ python ../printmodes.py ../push
+ $ $PYTHON ../printmodes.py ../push
00770 ../push/.hg/
00660 ../push/.hg/00changelog.i
00770 ../push/.hg/cache/
00660 ../push/.hg/cache/branch2-base
- 00660 ../push/.hg/cache/rbc-names-v1
- 00660 ../push/.hg/cache/rbc-revs-v1
00660 ../push/.hg/dirstate
00660 ../push/.hg/requires
00770 ../push/.hg/store/
@@ -148,8 +146,8 @@
$ mkdir dir
$ touch dir/file
$ hg ci -qAm 'add dir/file'
- $ storemode=`python ../mode.py .hg/store`
- $ dirmode=`python ../mode.py .hg/store/data/dir`
+ $ storemode=`$PYTHON ../mode.py .hg/store`
+ $ dirmode=`$PYTHON ../mode.py .hg/store/data/dir`
$ if [ "$storemode" != "$dirmode" ]; then
> echo "$storemode != $dirmode"
> fi
--- a/tests/test-install.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-install.t Tue Jun 20 16:33:46 2017 -0400
@@ -34,7 +34,7 @@
"editornotfound": false,
"encoding": "ascii",
"encodingerror": null,
- "extensionserror": null,
+ "extensionserror": null, (no-pure !)
"hgmodulepolicy": "*", (glob)
"hgmodules": "*mercurial", (glob)
"hgver": "*", (glob)
@@ -159,7 +159,7 @@
> print(' %s' % f)
> EOF
- $ python wixxml.py help
+ $ $PYTHON wixxml.py help
Not installed:
help/common.txt
help/hg-ssh.8.txt
@@ -168,8 +168,45 @@
help/hgrc.5.txt
Not tracked:
- $ python wixxml.py templates
+ $ $PYTHON wixxml.py templates
Not installed:
Not tracked:
#endif
+
+#if virtualenv
+
+Verify that Mercurial is installable with pip. Note that this MUST be
+the last test in this file, because we do some nasty things to the
+shell environment in order to make the virtualenv work reliably.
+
+ $ cd $TESTTMP
+Note: --no-site-packages is deprecated, but some places have an
+ancient virtualenv from their linux distro or similar and it's not yet
+the default for them.
+ $ unset PYTHONPATH
+ $ $PYTHON -m virtualenv --no-site-packages --never-download installenv >> pip.log
+Note: we use this weird path to run pip and hg to avoid platform differences,
+since it's bin on most platforms but Scripts on Windows.
+ $ ./installenv/*/pip install $TESTDIR/.. >> pip.log
+ $ ./installenv/*/hg debuginstall || cat pip.log
+ checking encoding (ascii)...
+ checking Python executable (*) (glob)
+ checking Python version (2.*) (glob)
+ checking Python lib (*)... (glob)
+ checking Python security support (*) (glob)
+ TLS 1.2 not supported by Python install; network connections lack modern security (?)
+ SNI not supported by Python install; may have connectivity issues with some servers (?)
+ checking Mercurial version (*) (glob)
+ checking Mercurial custom build (*) (glob)
+ checking module policy (*) (glob)
+ checking installed modules (*/mercurial)... (glob)
+ checking registered compression engines (*) (glob)
+ checking available compression engines (*) (glob)
+ checking available compression engines for wire protocol (*) (glob)
+ checking templates ($TESTTMP/installenv/*/site-packages/mercurial/templates)... (glob)
+ checking default template ($TESTTMP/installenv/*/site-packages/mercurial/templates/map-cmdline.default) (glob)
+ checking commit editor... (*) (glob)
+ checking username (test)
+ no problems detected
+#endif
--- a/tests/test-issue1175.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-issue1175.t Tue Jun 20 16:33:46 2017 -0400
@@ -33,6 +33,7 @@
warning: can't find ancestor for 'b' copied from 'a'!
committing manifest
committing changelog
+ updating the branch cache
committed changeset 5:83a687e8a97c80992ba385bbfd766be181bfb1d1
$ hg verify
--- a/tests/test-issue4074.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-issue4074.t Tue Jun 20 16:33:46 2017 -0400
@@ -16,12 +16,12 @@
Check in a big file:
- $ python ../s.py > a
+ $ $PYTHON ../s.py > a
$ hg ci -qAm0
Modify it:
- $ python ../s.py > a
+ $ $PYTHON ../s.py > a
Time a check-in, should never take more than 10 seconds user time:
--- a/tests/test-issue672.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-issue672.t Tue Jun 20 16:33:46 2017 -0400
@@ -38,7 +38,6 @@
removing 1
1a: remote created -> g
getting 1a
- 2: remote unchanged -> k
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
(branch merge, don't forget to commit)
--- a/tests/test-keyword.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-keyword.t Tue Jun 20 16:33:46 2017 -0400
@@ -178,6 +178,7 @@
committing manifest
committing changelog
overwriting a expanding keywords
+ updating the branch cache
committed changeset 1:ef63ca68695bc9495032c6fda1350c71e6d256e9
running hook commit.test: cp a hooktest
$ hg status
@@ -650,6 +651,7 @@
committing manifest
committing changelog
overwriting c expanding keywords
+ updating the branch cache
committed changeset 2:25736cf2f5cbe41f6be4e6784ef6ecf9f3bbcc7d
$ cat a c
expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
@@ -823,6 +825,7 @@
committing manifest
committing changelog
overwriting a expanding keywords
+ updating the branch cache
committed changeset 2:bb948857c743469b22bbf51f7ec8112279ca5d83
$ rm log
@@ -866,6 +869,7 @@
committing files:
committing manifest
committing changelog
+ updating the branch cache
committed changeset 3:d14c712653769de926994cf7fbb06c8fbd68f012
$ hg status
? c
@@ -941,6 +945,7 @@
committing manifest
committing changelog
overwriting a expanding keywords
+ updating the branch cache
committed changeset 2:85e279d709ffc28c9fdd1b868570985fc3d87082
$ hg export -o ../rejecttest.diff tip
$ cd ../Test
@@ -985,6 +990,7 @@
committing manifest
committing changelog
overwriting x/a expanding keywords
+ updating the branch cache
committed changeset 3:b4560182a3f9a358179fd2d835c15e9da379c1e4
$ cat a
expand $Id: x/a b4560182a3f9 Thu, 01 Jan 1970 00:00:03 +0000 user $
--- a/tests/test-largefiles-cache.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-largefiles-cache.t Tue Jun 20 16:33:46 2017 -0400
@@ -93,7 +93,7 @@
Portable way to print file permissions:
$ cat > ls-l.py <<EOF
- > #!/usr/bin/env python
+ > #!$PYTHON
> import sys, os
> path = sys.argv[1]
> print '%03o' % (os.lstat(path).st_mode & 0777)
@@ -206,6 +206,7 @@
large: data corruption in $TESTTMP/src/.hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020 with hash 6a7bb2556144babe3899b25e5428123735bb1e27 (glob)
0 largefiles updated, 0 removed
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "cd24c147f45c: modified"
[12] other heads for branch "default" (re)
$ hg st
! large
--- a/tests/test-largefiles-misc.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-largefiles-misc.t Tue Jun 20 16:33:46 2017 -0400
@@ -479,7 +479,7 @@
summary: anotherlarge
$ hg --debug log -T '{rev}: {desc}\n' ../sub/anotherlarge
- updated patterns: ['../.hglf/sub/../sub/anotherlarge', '../sub/anotherlarge']
+ updated patterns: ../.hglf/sub/../sub/anotherlarge, ../sub/anotherlarge
1: anotherlarge
$ hg log -G anotherlarge
@@ -498,18 +498,18 @@
summary: anotherlarge
$ hg --debug log -T '{rev}: {desc}\n' -G glob:another*
- updated patterns: ['glob:../.hglf/sub/another*', 'glob:another*']
+ updated patterns: glob:../.hglf/sub/another*, glob:another*
@ 1: anotherlarge
|
~
#if no-msys
$ hg --debug log -T '{rev}: {desc}\n' 'glob:../.hglf/sub/another*' # no-msys
- updated patterns: ['glob:../.hglf/sub/another*']
+ updated patterns: glob:../.hglf/sub/another*
1: anotherlarge
$ hg --debug log -G -T '{rev}: {desc}\n' 'glob:../.hglf/sub/another*' # no-msys
- updated patterns: ['glob:../.hglf/sub/another*']
+ updated patterns: glob:../.hglf/sub/another*
@ 1: anotherlarge
|
~
@@ -557,10 +557,10 @@
Log from outer space
$ hg --debug log -R addrm2 -T '{rev}: {desc}\n' 'addrm2/sub/anotherlarge'
- updated patterns: ['addrm2/.hglf/sub/anotherlarge', 'addrm2/sub/anotherlarge']
+ updated patterns: addrm2/.hglf/sub/anotherlarge, addrm2/sub/anotherlarge
1: anotherlarge
$ hg --debug log -R addrm2 -T '{rev}: {desc}\n' 'addrm2/.hglf/sub/anotherlarge'
- updated patterns: ['addrm2/.hglf/sub/anotherlarge']
+ updated patterns: addrm2/.hglf/sub/anotherlarge
1: anotherlarge
--- a/tests/test-largefiles-update.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-largefiles-update.t Tue Jun 20 16:33:46 2017 -0400
@@ -71,6 +71,7 @@
$ hg up
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "f74e50bd9e55: #2"
1 other heads for branch "default"
$ hg debugdirstate --large --nodate
n 644 7 set large1
@@ -86,6 +87,7 @@
n 644 13 set large2
$ hg up
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "f74e50bd9e55: #2"
1 other heads for branch "default"
$ hg debugdirstate --large --nodate
n 644 7 set large1
@@ -471,6 +473,7 @@
keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or
take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b? l
2 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ updated to "d65e59e952a9: #5"
1 other heads for branch "default"
$ hg status -A large1
@@ -505,6 +508,7 @@
keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or
take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b? l
2 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ updated to "d65e59e952a9: #5"
1 other heads for branch "default"
$ hg status -A large1
--- a/tests/test-largefiles.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-largefiles.t Tue Jun 20 16:33:46 2017 -0400
@@ -215,7 +215,7 @@
Test largefiles can be loaded in hgweb (wrapcommand() shouldn't fail)
$ cat <<EOF > "$TESTTMP/hgweb.cgi"
- > #!/usr/bin/env python
+ > #!$PYTHON
> from mercurial import demandimport; demandimport.enable()
> from mercurial.hgweb import hgweb
> from mercurial.hgweb import wsgicgi
--- a/tests/test-lock-badness.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-lock-badness.t Tue Jun 20 16:33:46 2017 -0400
@@ -14,10 +14,10 @@
Test that raising an exception in the release function doesn't cause the lock to choke
$ cat > testlock.py << EOF
- > from mercurial import cmdutil, error, error
+ > from mercurial import error, registrar
>
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
>
> def acquiretestlock(repo, releaseexc):
> def unlock():
--- a/tests/test-lock.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-lock.py Tue Jun 20 16:33:46 2017 -0400
@@ -260,12 +260,10 @@
lock = state.makelock(inheritchecker=check)
state.assertacquirecalled(True)
- def tryinherit():
+ with self.assertRaises(error.LockInheritanceContractViolation):
with lock.inherit():
pass
- self.assertRaises(error.LockInheritanceContractViolation, tryinherit)
-
lock.release()
def testfrequentlockunlock(self):
--- a/tests/test-log.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-log.t Tue Jun 20 16:33:46 2017 -0400
@@ -47,7 +47,7 @@
Make sure largefiles doesn't interfere with logging a regular file
$ hg --debug log a -T '{rev}: {desc}\n' --config extensions.largefiles=
- updated patterns: ['.hglf/a', 'a']
+ updated patterns: .hglf/a, a
0: a
$ hg log a
changeset: 0:9161b9aeaf16
@@ -67,7 +67,7 @@
summary: a
$ hg --debug log glob:a* -T '{rev}: {desc}\n' --config extensions.largefiles=
- updated patterns: ['glob:.hglf/a*', 'glob:a*']
+ updated patterns: glob:.hglf/a*, glob:a*
3: d
0: a
--- a/tests/test-logtoprocess.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-logtoprocess.t Tue Jun 20 16:33:46 2017 -0400
@@ -1,3 +1,5 @@
+#require no-windows
+
ATTENTION: logtoprocess runs commands asynchronously. Be sure to append "| cat"
to hg commands, to wait for the output, if you want to test its output.
Otherwise the test will be flaky.
@@ -6,10 +8,10 @@
$ hg init
$ cat > $TESTTMP/foocommand.py << EOF
- > from mercurial import cmdutil
+ > from mercurial import registrar
> from time import sleep
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> @command('foo', [])
> def foo(ui, repo):
> ui.log('foo', 'a message: %(bar)s\n', bar='spam')
--- a/tests/test-mac-packages.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-mac-packages.t Tue Jun 20 16:33:46 2017 -0400
@@ -25,7 +25,7 @@
Spot-check some randomly selected files:
$ grep bdiff boms.txt | cut -d ' ' -f 1,2,3
- ./Library/Python/2.7/site-packages/mercurial/bdiff.so 100755 0/0
+ ./Library/Python/2.7/site-packages/mercurial/cext/bdiff.so 100755 0/0
./Library/Python/2.7/site-packages/mercurial/pure/bdiff.py 100644 0/0
./Library/Python/2.7/site-packages/mercurial/pure/bdiff.pyc 100644 0/0
./Library/Python/2.7/site-packages/mercurial/pure/bdiff.pyo 100644 0/0
@@ -35,6 +35,7 @@
./usr/local/hg/contrib/hg-completion.bash 100644 0/0
$ egrep 'man[15]' boms.txt | cut -d ' ' -f 1,2,3
./usr/local/share/man/man1 40755 0/0
+ ./usr/local/share/man/man1/chg.1 100644 0/0
./usr/local/share/man/man1/hg.1 100644 0/0
./usr/local/share/man/man5 40755 0/0
./usr/local/share/man/man5/hgignore.5 100644 0/0
@@ -48,7 +49,8 @@
./Library/Python/2.7/site-packages/mercurial/localrepo.py 100644 0/0
./Library/Python/2.7/site-packages/mercurial/localrepo.pyc 100644 0/0
./Library/Python/2.7/site-packages/mercurial/localrepo.pyo 100644 0/0
- $ grep 'bin/hg ' boms.txt | cut -d ' ' -f 1,2,3
+ $ egrep 'bin/' boms.txt | cut -d ' ' -f 1,2,3
+ ./usr/local/bin/chg 100755 0/0
./usr/local/bin/hg 100755 0/0
Make sure the built binary uses the system Python interpreter
--- a/tests/test-mactext.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-mactext.t Tue Jun 20 16:33:46 2017 -0400
@@ -24,7 +24,7 @@
$ hg add f
$ hg ci -m 1
- $ python unix2mac.py f
+ $ $PYTHON unix2mac.py f
$ hg ci -m 2
attempt to commit or push text file(s) using CR line endings
in dea860dc51ec: f
@@ -32,7 +32,7 @@
rollback completed
abort: pretxncommit.cr hook failed
[255]
- $ hg cat f | python print.py
+ $ hg cat f | $PYTHON print.py
hello<LF>
- $ cat f | python print.py
+ $ cat f | $PYTHON print.py
hello<CR>
--- a/tests/test-manifest.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-manifest.py Tue Jun 20 16:33:46 2017 -0400
@@ -10,107 +10,102 @@
match as matchmod,
)
-EMTPY_MANIFEST = ''
-EMTPY_MANIFEST_V2 = '\0\n'
+EMTPY_MANIFEST = b''
+EMTPY_MANIFEST_V2 = b'\0\n'
-HASH_1 = '1' * 40
+HASH_1 = b'1' * 40
BIN_HASH_1 = binascii.unhexlify(HASH_1)
-HASH_2 = 'f' * 40
+HASH_2 = b'f' * 40
BIN_HASH_2 = binascii.unhexlify(HASH_2)
-HASH_3 = '1234567890abcdef0987654321deadbeef0fcafe'
+HASH_3 = b'1234567890abcdef0987654321deadbeef0fcafe'
BIN_HASH_3 = binascii.unhexlify(HASH_3)
A_SHORT_MANIFEST = (
- 'bar/baz/qux.py\0%(hash2)s%(flag2)s\n'
- 'foo\0%(hash1)s%(flag1)s\n'
- ) % {'hash1': HASH_1,
- 'flag1': '',
- 'hash2': HASH_2,
- 'flag2': 'l',
+ b'bar/baz/qux.py\0%(hash2)s%(flag2)s\n'
+ b'foo\0%(hash1)s%(flag1)s\n'
+ ) % {b'hash1': HASH_1,
+ b'flag1': b'',
+ b'hash2': HASH_2,
+ b'flag2': b'l',
}
# Same data as A_SHORT_MANIFEST
A_SHORT_MANIFEST_V2 = (
- '\0\n'
- '\x00bar/baz/qux.py\0%(flag2)s\n%(hash2)s\n'
- '\x00foo\0%(flag1)s\n%(hash1)s\n'
- ) % {'hash1': BIN_HASH_1,
- 'flag1': '',
- 'hash2': BIN_HASH_2,
- 'flag2': 'l',
+ b'\0\n'
+ b'\x00bar/baz/qux.py\0%(flag2)s\n%(hash2)s\n'
+ b'\x00foo\0%(flag1)s\n%(hash1)s\n'
+ ) % {b'hash1': BIN_HASH_1,
+ b'flag1': b'',
+ b'hash2': BIN_HASH_2,
+ b'flag2': b'l',
}
# Same data as A_SHORT_MANIFEST
A_METADATA_MANIFEST = (
- '\0foo\0bar\n'
- '\x00bar/baz/qux.py\0%(flag2)s\0foo\0bar\n%(hash2)s\n' # flag and metadata
- '\x00foo\0%(flag1)s\0foo\n%(hash1)s\n' # no flag, but metadata
- ) % {'hash1': BIN_HASH_1,
- 'flag1': '',
- 'hash2': BIN_HASH_2,
- 'flag2': 'l',
+ b'\0foo\0bar\n'
+ b'\x00bar/baz/qux.py\0%(flag2)s\0foo\0bar\n%(hash2)s\n' # flag and metadata
+ b'\x00foo\0%(flag1)s\0foo\n%(hash1)s\n' # no flag, but metadata
+ ) % {b'hash1': BIN_HASH_1,
+ b'flag1': b'',
+ b'hash2': BIN_HASH_2,
+ b'flag2': b'l',
}
A_STEM_COMPRESSED_MANIFEST = (
- '\0\n'
- '\x00bar/baz/qux.py\0%(flag2)s\n%(hash2)s\n'
- '\x04qux/foo.py\0%(flag1)s\n%(hash1)s\n' # simple case of 4 stem chars
- '\x0az.py\0%(flag1)s\n%(hash1)s\n' # tricky newline = 10 stem characters
- '\x00%(verylongdir)sx/x\0\n%(hash1)s\n'
- '\xffx/y\0\n%(hash2)s\n' # more than 255 stem chars
- ) % {'hash1': BIN_HASH_1,
- 'flag1': '',
- 'hash2': BIN_HASH_2,
- 'flag2': 'l',
- 'verylongdir': 255 * 'x',
+ b'\0\n'
+ b'\x00bar/baz/qux.py\0%(flag2)s\n%(hash2)s\n'
+ b'\x04qux/foo.py\0%(flag1)s\n%(hash1)s\n' # simple case of 4 stem chars
+ b'\x0az.py\0%(flag1)s\n%(hash1)s\n' # tricky newline = 10 stem characters
+ b'\x00%(verylongdir)sx/x\0\n%(hash1)s\n'
+ b'\xffx/y\0\n%(hash2)s\n' # more than 255 stem chars
+ ) % {b'hash1': BIN_HASH_1,
+ b'flag1': b'',
+ b'hash2': BIN_HASH_2,
+ b'flag2': b'l',
+ b'verylongdir': 255 * b'x',
}
A_DEEPER_MANIFEST = (
- 'a/b/c/bar.py\0%(hash3)s%(flag1)s\n'
- 'a/b/c/bar.txt\0%(hash1)s%(flag1)s\n'
- 'a/b/c/foo.py\0%(hash3)s%(flag1)s\n'
- 'a/b/c/foo.txt\0%(hash2)s%(flag2)s\n'
- 'a/b/d/baz.py\0%(hash3)s%(flag1)s\n'
- 'a/b/d/qux.py\0%(hash1)s%(flag2)s\n'
- 'a/b/d/ten.txt\0%(hash3)s%(flag2)s\n'
- 'a/b/dog.py\0%(hash3)s%(flag1)s\n'
- 'a/b/fish.py\0%(hash2)s%(flag1)s\n'
- 'a/c/london.py\0%(hash3)s%(flag2)s\n'
- 'a/c/paper.txt\0%(hash2)s%(flag2)s\n'
- 'a/c/paris.py\0%(hash2)s%(flag1)s\n'
- 'a/d/apple.py\0%(hash3)s%(flag1)s\n'
- 'a/d/pizza.py\0%(hash3)s%(flag2)s\n'
- 'a/green.py\0%(hash1)s%(flag2)s\n'
- 'a/purple.py\0%(hash2)s%(flag1)s\n'
- 'app.py\0%(hash3)s%(flag1)s\n'
- 'readme.txt\0%(hash2)s%(flag1)s\n'
- ) % {'hash1': HASH_1,
- 'flag1': '',
- 'hash2': HASH_2,
- 'flag2': 'l',
- 'hash3': HASH_3,
+ b'a/b/c/bar.py\0%(hash3)s%(flag1)s\n'
+ b'a/b/c/bar.txt\0%(hash1)s%(flag1)s\n'
+ b'a/b/c/foo.py\0%(hash3)s%(flag1)s\n'
+ b'a/b/c/foo.txt\0%(hash2)s%(flag2)s\n'
+ b'a/b/d/baz.py\0%(hash3)s%(flag1)s\n'
+ b'a/b/d/qux.py\0%(hash1)s%(flag2)s\n'
+ b'a/b/d/ten.txt\0%(hash3)s%(flag2)s\n'
+ b'a/b/dog.py\0%(hash3)s%(flag1)s\n'
+ b'a/b/fish.py\0%(hash2)s%(flag1)s\n'
+ b'a/c/london.py\0%(hash3)s%(flag2)s\n'
+ b'a/c/paper.txt\0%(hash2)s%(flag2)s\n'
+ b'a/c/paris.py\0%(hash2)s%(flag1)s\n'
+ b'a/d/apple.py\0%(hash3)s%(flag1)s\n'
+ b'a/d/pizza.py\0%(hash3)s%(flag2)s\n'
+ b'a/green.py\0%(hash1)s%(flag2)s\n'
+ b'a/purple.py\0%(hash2)s%(flag1)s\n'
+ b'app.py\0%(hash3)s%(flag1)s\n'
+ b'readme.txt\0%(hash2)s%(flag1)s\n'
+ ) % {b'hash1': HASH_1,
+ b'flag1': b'',
+ b'hash2': HASH_2,
+ b'flag2': b'l',
+ b'hash3': HASH_3,
}
HUGE_MANIFEST_ENTRIES = 200001
-A_HUGE_MANIFEST = ''.join(sorted(
- 'file%d\0%s%s\n' % (i, h, f) for i, h, f in
- itertools.izip(xrange(200001),
- itertools.cycle((HASH_1, HASH_2)),
- itertools.cycle(('', 'x', 'l')))))
+izip = getattr(itertools, 'izip', zip)
+if 'xrange' not in globals():
+ xrange = range
+
+A_HUGE_MANIFEST = b''.join(sorted(
+ b'file%d\0%s%s\n' % (i, h, f) for i, h, f in
+ izip(xrange(200001),
+ itertools.cycle((HASH_1, HASH_2)),
+ itertools.cycle((b'', b'x', b'l')))))
class basemanifesttests(object):
def parsemanifest(self, text):
raise NotImplementedError('parsemanifest not implemented by test case')
- def assertIn(self, thing, container, msg=None):
- # assertIn new in 2.7, use it if available, otherwise polyfill
- sup = getattr(unittest.TestCase, 'assertIn', False)
- if sup:
- return sup(self, thing, container, msg=msg)
- if not msg:
- msg = 'Expected %r in %r' % (thing, container)
- self.assert_(thing in container, msg)
-
def testEmptyManifest(self):
m = self.parsemanifest(EMTPY_MANIFEST)
self.assertEqual(0, len(m))
@@ -123,12 +118,13 @@
def testManifest(self):
m = self.parsemanifest(A_SHORT_MANIFEST)
- self.assertEqual(['bar/baz/qux.py', 'foo'], list(m))
- self.assertEqual(BIN_HASH_2, m['bar/baz/qux.py'])
- self.assertEqual('l', m.flags('bar/baz/qux.py'))
- self.assertEqual(BIN_HASH_1, m['foo'])
- self.assertEqual('', m.flags('foo'))
- self.assertRaises(KeyError, lambda : m['wat'])
+ self.assertEqual([b'bar/baz/qux.py', b'foo'], list(m))
+ self.assertEqual(BIN_HASH_2, m[b'bar/baz/qux.py'])
+ self.assertEqual(b'l', m.flags(b'bar/baz/qux.py'))
+ self.assertEqual(BIN_HASH_1, m[b'foo'])
+ self.assertEqual(b'', m.flags(b'foo'))
+ with self.assertRaises(KeyError):
+ m[b'wat']
def testParseManifestV2(self):
m1 = self.parsemanifest(A_SHORT_MANIFEST)
@@ -143,11 +139,11 @@
def testParseManifestStemCompression(self):
m = self.parsemanifest(A_STEM_COMPRESSED_MANIFEST)
- self.assertIn('bar/baz/qux.py', m)
- self.assertIn('bar/qux/foo.py', m)
- self.assertIn('bar/qux/foz.py', m)
- self.assertIn(256 * 'x' + '/x', m)
- self.assertIn(256 * 'x' + '/y', m)
+ self.assertIn(b'bar/baz/qux.py', m)
+ self.assertIn(b'bar/qux/foo.py', m)
+ self.assertIn(b'bar/qux/foz.py', m)
+ self.assertIn(256 * b'x' + b'/x', m)
+ self.assertIn(256 * b'x' + b'/y', m)
self.assertEqual(A_STEM_COMPRESSED_MANIFEST, m.text(usemanifestv2=True))
def testTextV2(self):
@@ -159,38 +155,38 @@
want = BIN_HASH_1
m = self.parsemanifest(EMTPY_MANIFEST)
- m['a'] = want
- self.assertIn('a', m)
- self.assertEqual(want, m['a'])
- self.assertEqual('a\0' + HASH_1 + '\n', m.text())
+ m[b'a'] = want
+ self.assertIn(b'a', m)
+ self.assertEqual(want, m[b'a'])
+ self.assertEqual(b'a\0' + HASH_1 + b'\n', m.text())
m = self.parsemanifest(A_SHORT_MANIFEST)
- m['a'] = want
- self.assertEqual(want, m['a'])
- self.assertEqual('a\0' + HASH_1 + '\n' + A_SHORT_MANIFEST,
+ m[b'a'] = want
+ self.assertEqual(want, m[b'a'])
+ self.assertEqual(b'a\0' + HASH_1 + b'\n' + A_SHORT_MANIFEST,
m.text())
def testSetFlag(self):
- want = 'x'
+ want = b'x'
m = self.parsemanifest(EMTPY_MANIFEST)
# first add a file; a file-less flag makes no sense
- m['a'] = BIN_HASH_1
- m.setflag('a', want)
- self.assertEqual(want, m.flags('a'))
- self.assertEqual('a\0' + HASH_1 + want + '\n', m.text())
+ m[b'a'] = BIN_HASH_1
+ m.setflag(b'a', want)
+ self.assertEqual(want, m.flags(b'a'))
+ self.assertEqual(b'a\0' + HASH_1 + want + b'\n', m.text())
m = self.parsemanifest(A_SHORT_MANIFEST)
# first add a file; a file-less flag makes no sense
- m['a'] = BIN_HASH_1
- m.setflag('a', want)
- self.assertEqual(want, m.flags('a'))
- self.assertEqual('a\0' + HASH_1 + want + '\n' + A_SHORT_MANIFEST,
+ m[b'a'] = BIN_HASH_1
+ m.setflag(b'a', want)
+ self.assertEqual(want, m.flags(b'a'))
+ self.assertEqual(b'a\0' + HASH_1 + want + b'\n' + A_SHORT_MANIFEST,
m.text())
def testCopy(self):
m = self.parsemanifest(A_SHORT_MANIFEST)
- m['a'] = BIN_HASH_1
+ m[b'a'] = BIN_HASH_1
m2 = m.copy()
del m
del m2 # make sure we don't double free() anything
@@ -199,132 +195,135 @@
unhex = binascii.unhexlify
h1, h2 = unhex(HASH_1), unhex(HASH_2)
m = self.parsemanifest(A_SHORT_MANIFEST)
- m['alpha'] = h1
- m['beta'] = h2
- del m['foo']
- want = 'alpha\0%s\nbar/baz/qux.py\0%sl\nbeta\0%s\n' % (
+ m[b'alpha'] = h1
+ m[b'beta'] = h2
+ del m[b'foo']
+ want = b'alpha\0%s\nbar/baz/qux.py\0%sl\nbeta\0%s\n' % (
HASH_1, HASH_2, HASH_2)
self.assertEqual(want, m.text())
self.assertEqual(3, len(m))
- self.assertEqual(['alpha', 'bar/baz/qux.py', 'beta'], list(m))
- self.assertEqual(h1, m['alpha'])
- self.assertEqual(h2, m['bar/baz/qux.py'])
- self.assertEqual(h2, m['beta'])
- self.assertEqual('', m.flags('alpha'))
- self.assertEqual('l', m.flags('bar/baz/qux.py'))
- self.assertEqual('', m.flags('beta'))
- self.assertRaises(KeyError, lambda : m['foo'])
+ self.assertEqual([b'alpha', b'bar/baz/qux.py', b'beta'], list(m))
+ self.assertEqual(h1, m[b'alpha'])
+ self.assertEqual(h2, m[b'bar/baz/qux.py'])
+ self.assertEqual(h2, m[b'beta'])
+ self.assertEqual(b'', m.flags(b'alpha'))
+ self.assertEqual(b'l', m.flags(b'bar/baz/qux.py'))
+ self.assertEqual(b'', m.flags(b'beta'))
+ with self.assertRaises(KeyError):
+ m[b'foo']
def testSetGetNodeSuffix(self):
clean = self.parsemanifest(A_SHORT_MANIFEST)
m = self.parsemanifest(A_SHORT_MANIFEST)
- h = m['foo']
- f = m.flags('foo')
- want = h + 'a'
+ h = m[b'foo']
+ f = m.flags(b'foo')
+ want = h + b'a'
# Merge code wants to set 21-byte fake hashes at times
- m['foo'] = want
- self.assertEqual(want, m['foo'])
- self.assertEqual([('bar/baz/qux.py', BIN_HASH_2),
- ('foo', BIN_HASH_1 + 'a')],
+ m[b'foo'] = want
+ self.assertEqual(want, m[b'foo'])
+ self.assertEqual([(b'bar/baz/qux.py', BIN_HASH_2),
+ (b'foo', BIN_HASH_1 + b'a')],
list(m.iteritems()))
# Sometimes it even tries a 22-byte fake hash, but we can
# return 21 and it'll work out
- m['foo'] = want + '+'
- self.assertEqual(want, m['foo'])
+ m[b'foo'] = want + b'+'
+ self.assertEqual(want, m[b'foo'])
# make sure the suffix survives a copy
- match = matchmod.match('', '', ['re:foo'])
+ match = matchmod.match(b'', b'', [b're:foo'])
m2 = m.matches(match)
- self.assertEqual(want, m2['foo'])
+ self.assertEqual(want, m2[b'foo'])
self.assertEqual(1, len(m2))
m2 = m.copy()
- self.assertEqual(want, m2['foo'])
+ self.assertEqual(want, m2[b'foo'])
# suffix with iteration
- self.assertEqual([('bar/baz/qux.py', BIN_HASH_2),
- ('foo', want)],
+ self.assertEqual([(b'bar/baz/qux.py', BIN_HASH_2),
+ (b'foo', want)],
list(m.iteritems()))
# shows up in diff
- self.assertEqual({'foo': ((want, f), (h, ''))}, m.diff(clean))
- self.assertEqual({'foo': ((h, ''), (want, f))}, clean.diff(m))
+ self.assertEqual({b'foo': ((want, f), (h, b''))}, m.diff(clean))
+ self.assertEqual({b'foo': ((h, b''), (want, f))}, clean.diff(m))
def testMatchException(self):
m = self.parsemanifest(A_SHORT_MANIFEST)
- match = matchmod.match('', '', ['re:.*'])
+ match = matchmod.match(b'', b'', [b're:.*'])
def filt(path):
- if path == 'foo':
+ if path == b'foo':
assert False
return True
match.matchfn = filt
- self.assertRaises(AssertionError, m.matches, match)
+ with self.assertRaises(AssertionError):
+ m.matches(match)
def testRemoveItem(self):
m = self.parsemanifest(A_SHORT_MANIFEST)
- del m['foo']
- self.assertRaises(KeyError, lambda : m['foo'])
+ del m[b'foo']
+ with self.assertRaises(KeyError):
+ m[b'foo']
self.assertEqual(1, len(m))
self.assertEqual(1, len(list(m)))
# now restore and make sure everything works right
- m['foo'] = 'a' * 20
+ m[b'foo'] = b'a' * 20
self.assertEqual(2, len(m))
self.assertEqual(2, len(list(m)))
def testManifestDiff(self):
- MISSING = (None, '')
- addl = 'z-only-in-left\0' + HASH_1 + '\n'
- addr = 'z-only-in-right\0' + HASH_2 + 'x\n'
+ MISSING = (None, b'')
+ addl = b'z-only-in-left\0' + HASH_1 + b'\n'
+ addr = b'z-only-in-right\0' + HASH_2 + b'x\n'
left = self.parsemanifest(
- A_SHORT_MANIFEST.replace(HASH_1, HASH_3 + 'x') + addl)
+ A_SHORT_MANIFEST.replace(HASH_1, HASH_3 + b'x') + addl)
right = self.parsemanifest(A_SHORT_MANIFEST + addr)
want = {
- 'foo': ((BIN_HASH_3, 'x'),
- (BIN_HASH_1, '')),
- 'z-only-in-left': ((BIN_HASH_1, ''), MISSING),
- 'z-only-in-right': (MISSING, (BIN_HASH_2, 'x')),
+ b'foo': ((BIN_HASH_3, b'x'),
+ (BIN_HASH_1, b'')),
+ b'z-only-in-left': ((BIN_HASH_1, b''), MISSING),
+ b'z-only-in-right': (MISSING, (BIN_HASH_2, b'x')),
}
self.assertEqual(want, left.diff(right))
want = {
- 'bar/baz/qux.py': (MISSING, (BIN_HASH_2, 'l')),
- 'foo': (MISSING, (BIN_HASH_3, 'x')),
- 'z-only-in-left': (MISSING, (BIN_HASH_1, '')),
+ b'bar/baz/qux.py': (MISSING, (BIN_HASH_2, b'l')),
+ b'foo': (MISSING, (BIN_HASH_3, b'x')),
+ b'z-only-in-left': (MISSING, (BIN_HASH_1, b'')),
}
self.assertEqual(want, self.parsemanifest(EMTPY_MANIFEST).diff(left))
want = {
- 'bar/baz/qux.py': ((BIN_HASH_2, 'l'), MISSING),
- 'foo': ((BIN_HASH_3, 'x'), MISSING),
- 'z-only-in-left': ((BIN_HASH_1, ''), MISSING),
+ b'bar/baz/qux.py': ((BIN_HASH_2, b'l'), MISSING),
+ b'foo': ((BIN_HASH_3, b'x'), MISSING),
+ b'z-only-in-left': ((BIN_HASH_1, b''), MISSING),
}
self.assertEqual(want, left.diff(self.parsemanifest(EMTPY_MANIFEST)))
copy = right.copy()
- del copy['z-only-in-right']
- del right['foo']
+ del copy[b'z-only-in-right']
+ del right[b'foo']
want = {
- 'foo': (MISSING, (BIN_HASH_1, '')),
- 'z-only-in-right': ((BIN_HASH_2, 'x'), MISSING),
+ b'foo': (MISSING, (BIN_HASH_1, b'')),
+ b'z-only-in-right': ((BIN_HASH_2, b'x'), MISSING),
}
self.assertEqual(want, right.diff(copy))
short = self.parsemanifest(A_SHORT_MANIFEST)
pruned = short.copy()
- del pruned['foo']
+ del pruned[b'foo']
want = {
- 'foo': ((BIN_HASH_1, ''), MISSING),
+ b'foo': ((BIN_HASH_1, b''), MISSING),
}
self.assertEqual(want, short.diff(pruned))
want = {
- 'foo': (MISSING, (BIN_HASH_1, '')),
+ b'foo': (MISSING, (BIN_HASH_1, b'')),
}
self.assertEqual(want, pruned.diff(short))
want = {
- 'bar/baz/qux.py': None,
- 'foo': (MISSING, (BIN_HASH_1, '')),
+ b'bar/baz/qux.py': None,
+ b'foo': (MISSING, (BIN_HASH_1, b'')),
}
self.assertEqual(want, pruned.diff(short, clean=True))
def testReversedLines(self):
- backwards = ''.join(
- l + '\n' for l in reversed(A_SHORT_MANIFEST.split('\n')) if l)
+ backwards = b''.join(
+ l + b'\n' for l in reversed(A_SHORT_MANIFEST.split(b'\n')) if l)
try:
self.parsemanifest(backwards)
self.fail('Should have raised ValueError')
@@ -333,14 +332,14 @@
def testNoTerminalNewline(self):
try:
- self.parsemanifest(A_SHORT_MANIFEST + 'wat')
+ self.parsemanifest(A_SHORT_MANIFEST + b'wat')
self.fail('Should have raised ValueError')
except ValueError as v:
self.assertIn('Manifest did not end in a newline.', str(v))
def testNoNewLineAtAll(self):
try:
- self.parsemanifest('wat')
+ self.parsemanifest(b'wat')
self.fail('Should have raised ValueError')
except ValueError as v:
self.assertIn('Manifest did not end in a newline.', str(v))
@@ -356,13 +355,13 @@
the resulting manifest.'''
m = self.parsemanifest(A_HUGE_MANIFEST)
- match = matchmod.match('/', '',
- ['file1', 'file200', 'file300'], exact=True)
+ match = matchmod.match(b'/', b'',
+ [b'file1', b'file200', b'file300'], exact=True)
m2 = m.matches(match)
- w = ('file1\0%sx\n'
- 'file200\0%sl\n'
- 'file300\0%s\n') % (HASH_2, HASH_1, HASH_1)
+ w = (b'file1\0%sx\n'
+ b'file200\0%sl\n'
+ b'file300\0%s\n') % (HASH_2, HASH_1, HASH_1)
self.assertEqual(w, m2.text())
def testMatchesNonexistentFile(self):
@@ -371,13 +370,14 @@
'''
m = self.parsemanifest(A_DEEPER_MANIFEST)
- match = matchmod.match('/', '',
- ['a/b/c/bar.txt', 'a/b/d/qux.py', 'readme.txt', 'nonexistent'],
+ match = matchmod.match(b'/', b'',
+ [b'a/b/c/bar.txt', b'a/b/d/qux.py',
+ b'readme.txt', b'nonexistent'],
exact=True)
m2 = m.matches(match)
self.assertEqual(
- ['a/b/c/bar.txt', 'a/b/d/qux.py', 'readme.txt'],
+ [b'a/b/c/bar.txt', b'a/b/d/qux.py', b'readme.txt'],
m2.keys())
def testMatchesNonexistentDirectory(self):
@@ -385,7 +385,7 @@
actually exist.'''
m = self.parsemanifest(A_DEEPER_MANIFEST)
- match = matchmod.match('/', '', ['a/f'], default='relpath')
+ match = matchmod.match(b'/', b'', [b'a/f'], default=b'relpath')
m2 = m.matches(match)
self.assertEqual([], m2.keys())
@@ -396,7 +396,7 @@
m = self.parsemanifest(A_HUGE_MANIFEST)
flist = m.keys()[80:300]
- match = matchmod.match('/', '', flist, exact=True)
+ match = matchmod.match(b'/', b'', flist, exact=True)
m2 = m.matches(match)
self.assertEqual(flist, m2.keys())
@@ -405,7 +405,7 @@
'''Tests matches() for what should be a full match.'''
m = self.parsemanifest(A_DEEPER_MANIFEST)
- match = matchmod.match('/', '', [''])
+ match = matchmod.match(b'/', b'', [b''])
m2 = m.matches(match)
self.assertEqual(m.keys(), m2.keys())
@@ -415,13 +415,14 @@
match against all files within said directory.'''
m = self.parsemanifest(A_DEEPER_MANIFEST)
- match = matchmod.match('/', '', ['a/b'], default='relpath')
+ match = matchmod.match(b'/', b'', [b'a/b'], default=b'relpath')
m2 = m.matches(match)
self.assertEqual([
- 'a/b/c/bar.py', 'a/b/c/bar.txt', 'a/b/c/foo.py', 'a/b/c/foo.txt',
- 'a/b/d/baz.py', 'a/b/d/qux.py', 'a/b/d/ten.txt', 'a/b/dog.py',
- 'a/b/fish.py'], m2.keys())
+ b'a/b/c/bar.py', b'a/b/c/bar.txt', b'a/b/c/foo.py',
+ b'a/b/c/foo.txt',
+ b'a/b/d/baz.py', b'a/b/d/qux.py', b'a/b/d/ten.txt', b'a/b/dog.py',
+ b'a/b/fish.py'], m2.keys())
def testMatchesExactPath(self):
'''Tests matches() on an exact match on a directory, which should
@@ -429,7 +430,7 @@
against a directory.'''
m = self.parsemanifest(A_DEEPER_MANIFEST)
- match = matchmod.match('/', '', ['a/b'], exact=True)
+ match = matchmod.match(b'/', b'', [b'a/b'], exact=True)
m2 = m.matches(match)
self.assertEqual([], m2.keys())
@@ -439,24 +440,24 @@
when not in the root directory.'''
m = self.parsemanifest(A_DEEPER_MANIFEST)
- match = matchmod.match('/', 'a/b', ['.'], default='relpath')
+ match = matchmod.match(b'/', b'a/b', [b'.'], default=b'relpath')
m2 = m.matches(match)
self.assertEqual([
- 'a/b/c/bar.py', 'a/b/c/bar.txt', 'a/b/c/foo.py', 'a/b/c/foo.txt',
- 'a/b/d/baz.py', 'a/b/d/qux.py', 'a/b/d/ten.txt', 'a/b/dog.py',
- 'a/b/fish.py'], m2.keys())
+ b'a/b/c/bar.py', b'a/b/c/bar.txt', b'a/b/c/foo.py',
+ b'a/b/c/foo.txt', b'a/b/d/baz.py', b'a/b/d/qux.py',
+ b'a/b/d/ten.txt', b'a/b/dog.py', b'a/b/fish.py'], m2.keys())
def testMatchesWithPattern(self):
'''Tests matches() for files matching a pattern that reside
deeper than the specified directory.'''
m = self.parsemanifest(A_DEEPER_MANIFEST)
- match = matchmod.match('/', '', ['a/b/*/*.txt'])
+ match = matchmod.match(b'/', b'', [b'a/b/*/*.txt'])
m2 = m.matches(match)
self.assertEqual(
- ['a/b/c/bar.txt', 'a/b/c/foo.txt', 'a/b/d/ten.txt'],
+ [b'a/b/c/bar.txt', b'a/b/c/foo.txt', b'a/b/d/ten.txt'],
m2.keys())
class testmanifestdict(unittest.TestCase, basemanifesttests):
@@ -465,21 +466,22 @@
class testtreemanifest(unittest.TestCase, basemanifesttests):
def parsemanifest(self, text):
- return manifestmod.treemanifest('', text)
+ return manifestmod.treemanifest(b'', text)
def testWalkSubtrees(self):
m = self.parsemanifest(A_DEEPER_MANIFEST)
dirs = [s._dir for s in m.walksubtrees()]
self.assertEqual(
- sorted(['', 'a/', 'a/c/', 'a/d/', 'a/b/', 'a/b/c/', 'a/b/d/']),
+ sorted([
+ b'', b'a/', b'a/c/', b'a/d/', b'a/b/', b'a/b/c/', b'a/b/d/']),
sorted(dirs)
)
- match = matchmod.match('/', '', ['path:a/b/'])
+ match = matchmod.match(b'/', b'', [b'path:a/b/'])
dirs = [s._dir for s in m.walksubtrees(matcher=match)]
self.assertEqual(
- sorted(['a/b/', 'a/b/c/', 'a/b/d/']),
+ sorted([b'a/b/', b'a/b/c/', b'a/b/d/']),
sorted(dirs)
)
--- a/tests/test-merge-changedelete.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-merge-changedelete.t Tue Jun 20 16:33:46 2017 -0400
@@ -114,6 +114,7 @@
$ hg co -C
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to "13910f48cf7b: changed file1, removed file2, changed file3"
1 other heads for branch "default"
$ hg merge --config ui.interactive=true <<EOF
@@ -178,6 +179,7 @@
$ hg co -C
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "13910f48cf7b: changed file1, removed file2, changed file3"
1 other heads for branch "default"
$ hg merge --config ui.interactive=true <<EOF
@@ -254,6 +256,7 @@
$ hg co -C
2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to "13910f48cf7b: changed file1, removed file2, changed file3"
1 other heads for branch "default"
$ hg merge --config ui.interactive=true <<EOF
@@ -316,6 +319,7 @@
$ hg co -C
2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to "13910f48cf7b: changed file1, removed file2, changed file3"
1 other heads for branch "default"
$ hg merge --tool :local
@@ -364,6 +368,7 @@
$ hg co -C
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "13910f48cf7b: changed file1, removed file2, changed file3"
1 other heads for branch "default"
$ hg merge --tool :other
@@ -412,6 +417,7 @@
$ hg co -C
2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to "13910f48cf7b: changed file1, removed file2, changed file3"
1 other heads for branch "default"
$ hg merge --tool :fail
@@ -463,6 +469,7 @@
$ hg co -C
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to "13910f48cf7b: changed file1, removed file2, changed file3"
1 other heads for branch "default"
$ hg merge --config ui.interactive=True --tool :prompt
@@ -470,8 +477,7 @@
use (c)hanged version, (d)elete, or leave (u)nresolved?
other [merge rev] changed file2 which local [working copy] deleted
use (c)hanged version, leave (d)eleted, or leave (u)nresolved?
- no tool found to merge file3
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved?
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for file3?
0 files updated, 0 files merged, 0 files removed, 3 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
[1]
@@ -522,6 +528,7 @@
$ hg co -C
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to "13910f48cf7b: changed file1, removed file2, changed file3"
1 other heads for branch "default"
$ hg merge --tool :prompt
@@ -529,8 +536,7 @@
use (c)hanged version, (d)elete, or leave (u)nresolved? u
other [merge rev] changed file2 which local [working copy] deleted
use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
- no tool found to merge file3
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved? u
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for file3? u
0 files updated, 0 files merged, 0 files removed, 3 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
[1]
@@ -579,6 +585,7 @@
$ hg co -C
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to "13910f48cf7b: changed file1, removed file2, changed file3"
1 other heads for branch "default"
$ hg merge --tool :merge3
@@ -676,8 +683,7 @@
use (c)hanged version, (d)elete, or leave (u)nresolved?
other [merge rev] changed file2 which local [working copy] deleted
use (c)hanged version, leave (d)eleted, or leave (u)nresolved?
- no tool found to merge file3
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved?
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for file3?
--- diff of status ---
(status identical)
@@ -705,8 +711,7 @@
use (c)hanged version, (d)elete, or leave (u)nresolved?
other [merge rev] changed file2 which local [working copy] deleted
use (c)hanged version, leave (d)eleted, or leave (u)nresolved?
- no tool found to merge file3
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved?
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for file3?
--- diff of status ---
(status identical)
@@ -724,8 +729,7 @@
use (c)hanged version, (d)elete, or leave (u)nresolved?
other [merge rev] changed file2 which local [working copy] deleted
use (c)hanged version, leave (d)eleted, or leave (u)nresolved?
- no tool found to merge file3
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved?
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for file3?
--- diff of status ---
(status identical)
--- a/tests/test-merge-default.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-merge-default.t Tue Jun 20 16:33:46 2017 -0400
@@ -33,6 +33,7 @@
$ hg up
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "f25cbe84d8b3: e"
2 other heads for branch "default"
Should fail because > 2 heads:
--- a/tests/test-merge-force.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-merge-force.t Tue Jun 20 16:33:46 2017 -0400
@@ -10,26 +10,26 @@
Create base changeset
- $ python $TESTDIR/generate-working-copy-states.py state 3 1
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 3 1
$ hg addremove -q --similarity 0
$ hg commit -qm 'base'
Create remote changeset
- $ python $TESTDIR/generate-working-copy-states.py state 3 2
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 3 2
$ hg addremove -q --similarity 0
$ hg commit -qm 'remote'
Create local changeset
$ hg update -q 0
- $ python $TESTDIR/generate-working-copy-states.py state 3 3
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 3 3
$ hg addremove -q --similarity 0
$ hg commit -qm 'local'
Set up working directory
- $ python $TESTDIR/generate-working-copy-states.py state 3 wc
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 3 wc
$ hg addremove -q --similarity 0
$ hg forget *_*_*_*-untracked
$ rm *_*_*_missing-*
@@ -286,7 +286,7 @@
the remote side did not touch the file
$ checkstatus() {
- > for f in `python $TESTDIR/generate-working-copy-states.py filelist 3`
+ > for f in `$PYTHON $TESTDIR/generate-working-copy-states.py filelist 3`
> do
> echo
> hg status -A $f
@@ -667,7 +667,7 @@
missing_missing_missing_missing-untracked: * (glob)
<missing>
- $ for f in `python $TESTDIR/generate-working-copy-states.py filelist 3`
+ $ for f in `$PYTHON $TESTDIR/generate-working-copy-states.py filelist 3`
> do
> if test -f ${f}.orig
> then
@@ -784,7 +784,7 @@
$ hg -q update --clean 2
$ hg --config extensions.purge= purge
- $ python $TESTDIR/generate-working-copy-states.py state 3 wc
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 3 wc
$ hg addremove -q --similarity 0
$ hg forget *_*_*_*-untracked
$ rm *_*_*_missing-*
--- a/tests/test-merge-symlinks.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-merge-symlinks.t Tue Jun 20 16:33:46 2017 -0400
@@ -1,5 +1,5 @@
$ cat > echo.py <<EOF
- > #!/usr/bin/env python
+ > #!$PYTHON
> import os, sys
> try:
> import msvcrt
@@ -36,7 +36,7 @@
Merge them and display *_ISLINK vars
merge heads
- $ hg merge --tool="python ../echo.py"
+ $ hg merge --tool="$PYTHON ../echo.py"
merging l
HG_FILE l
HG_MY_ISLINK 1
@@ -52,7 +52,7 @@
$ hg up -C 2
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg copy l l2
- $ HGMERGE="python ../echo.py" hg up 3
+ $ HGMERGE="$PYTHON ../echo.py" hg up 3
merging l2
HG_FILE l2
HG_MY_ISLINK 1
--- a/tests/test-merge-tools.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-merge-tools.t Tue Jun 20 16:33:46 2017 -0400
@@ -392,9 +392,9 @@
true.executable=cat
# hg update -C 1
$ hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=nonexistentmergetool
- couldn't find merge tool true specified for f
+ couldn't find merge tool true (for pattern f)
merging f
- couldn't find merge tool true specified for f
+ couldn't find merge tool true (for pattern f)
merging f failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
@@ -418,9 +418,9 @@
true.executable=cat
# hg update -C 1
$ hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=/nonexistent/mergetool
- couldn't find merge tool true specified for f
+ couldn't find merge tool true (for pattern f)
merging f
- couldn't find merge tool true specified for f
+ couldn't find merge tool true (for pattern f)
merging f failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
@@ -531,8 +531,7 @@
true.executable=cat
# hg update -C 1
$ hg merge -r 2 --config ui.merge=internal:prompt
- no tool found to merge f
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved? u
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
[1]
@@ -556,8 +555,7 @@
$ hg merge -r 2 --config ui.merge=:prompt --config ui.interactive=True << EOF
> u
> EOF
- no tool found to merge f
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved? u
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
[1]
@@ -579,8 +577,7 @@
true.executable=cat
# hg update -C 1
$ hg merge -r 2 --config ui.merge=internal:prompt --config ui.interactive=true
- no tool found to merge f
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved?
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f?
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
[1]
@@ -593,8 +590,7 @@
# hg resolve --list
U f
$ hg resolve --all --config ui.merge=internal:prompt --config ui.interactive=true
- no tool found to merge f
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved?
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f?
[1]
$ aftermerge
# cat f
@@ -607,8 +603,7 @@
U f
$ rm f
$ hg resolve --all --config ui.merge=internal:prompt --config ui.interactive=true
- no tool found to merge f
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved?
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f?
[1]
$ aftermerge
# cat f
@@ -619,8 +614,7 @@
# hg resolve --list
U f
$ hg resolve --all --config ui.merge=internal:prompt
- no tool found to merge f
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved? u
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f? u
[1]
$ aftermerge
# cat f
@@ -677,6 +671,72 @@
space
$ rm f.base f.local f.other
+check that internal:dump doesn't dump files if premerge runs
+successfully
+
+ $ beforemerge
+ [merge-tools]
+ false.whatever=
+ true.priority=1
+ true.executable=cat
+ # hg update -C 1
+ $ hg merge -r 3 --config ui.merge=internal:dump
+ merging f
+ 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+
+ $ aftermerge
+ # cat f
+ revision 1
+ space
+ revision 3
+ # hg stat
+ M f
+ # hg resolve --list
+ R f
+
+check that internal:forcedump dumps files, even if local and other can
+be merged easily
+
+ $ beforemerge
+ [merge-tools]
+ false.whatever=
+ true.priority=1
+ true.executable=cat
+ # hg update -C 1
+ $ hg merge -r 3 --config ui.merge=internal:forcedump
+ merging f
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ [1]
+ $ aftermerge
+ # cat f
+ revision 1
+ space
+ # hg stat
+ M f
+ ? f.base
+ ? f.local
+ ? f.orig
+ ? f.other
+ # hg resolve --list
+ U f
+
+ $ cat f.base
+ revision 0
+ space
+
+ $ cat f.local
+ revision 1
+ space
+
+ $ cat f.other
+ revision 0
+ space
+ revision 3
+
+ $ rm -f f.base f.local f.other
+
ui.merge specifies internal:other but is overruled by pattern for false:
$ beforemerge
@@ -1221,3 +1281,68 @@
*/f~base.?????? $TESTTMP/f.txt.orig */f~other.??????.txt $TESTTMP/f.txt (glob)
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
+
+Check that debugpickmergetool examines which merge tool is chosen for
+specified file as expected
+
+ $ beforemerge
+ [merge-tools]
+ false.whatever=
+ true.priority=1
+ true.executable=cat
+ # hg update -C 1
+
+(default behavior: checking files in the working parent context)
+
+ $ hg manifest
+ f
+ $ hg debugpickmergetool
+ f = true
+
+(-X/-I and file patterns limit examination targets)
+
+ $ hg debugpickmergetool -X f
+ $ hg debugpickmergetool unknown
+ unknown: no such file in rev ef83787e2614
+
+(--changedelete emulates merging change and delete)
+
+ $ hg debugpickmergetool --changedelete
+ f = :prompt
+
+(-r REV causes checking files in specified revision)
+
+ $ hg manifest -r tip
+ f.txt
+ $ hg debugpickmergetool -r tip
+ f.txt = true
+
+#if symlink
+
+(symlink causes choosing :prompt)
+
+ $ hg debugpickmergetool -r 6d00b3726f6e
+ f = :prompt
+
+#endif
+
+(--verbose shows some configurations)
+
+ $ hg debugpickmergetool --tool foobar -v
+ with --tool 'foobar'
+ f = foobar
+
+ $ HGMERGE=false hg debugpickmergetool -v
+ with HGMERGE='false'
+ f = false
+
+ $ hg debugpickmergetool --config ui.merge=false -v
+ with ui.merge='false'
+ f = false
+
+(--debug shows errors detected intermediately)
+
+ $ hg debugpickmergetool --config merge-patterns.f=true --config merge-tools.true.executable=nonexistentmergetool --debug f
+ couldn't find merge tool true (for pattern f)
+ couldn't find merge tool true
+ f = false
--- a/tests/test-merge-types.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-merge-types.t Tue Jun 20 16:33:46 2017 -0400
@@ -155,6 +155,7 @@
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg up
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "521a1e40188f: symlink"
1 other heads for branch "default"
$ hg st
? a.orig
@@ -171,11 +172,12 @@
preserving a for resolve of a
a: versions differ -> m (premerge)
(couldn't find merge tool hgmerge|tool hgmerge can't handle symlinks) (re)
+ no tool found to merge a
picked tool ':prompt' for a (binary False symlink True changedelete False)
- no tool found to merge a
- keep (l)ocal [working copy], take (o)ther [destination], or leave (u)nresolved? u
+ keep (l)ocal [working copy], take (o)ther [destination], or leave (u)nresolved for a? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges
+ updated to "521a1e40188f: symlink"
1 other heads for branch "default"
[1]
$ hg diff --git
--- a/tests/test-mq-eol.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-mq-eol.t Tue Jun 20 16:33:46 2017 -0400
@@ -48,7 +48,7 @@
$ hg ci -Am adda
adding .hgignore
adding a
- $ python ../makepatch.py
+ $ $PYTHON ../makepatch.py
$ hg qimport eol.diff
adding eol.diff to series file
@@ -85,7 +85,7 @@
applying eol.diff
now at: eol.diff
$ hg qrefresh
- $ python ../cateol.py .hg/patches/eol.diff
+ $ $PYTHON ../cateol.py .hg/patches/eol.diff
# HG changeset patch<LF>
# Parent 0d0bf99a8b7a3842c6f8ef09e34f69156c4bd9d0<LF>
test message<LF>
@@ -106,7 +106,7 @@
+d<CR><LF>
+z<LF>
\ No newline at end of file<LF>
- $ python ../cateol.py a
+ $ $PYTHON ../cateol.py a
a<CR><LF>
y<CR><LF>
c<CR><LF>
@@ -121,7 +121,7 @@
$ hg --config patch.eol='CRLF' qpush
applying eol.diff
now at: eol.diff
- $ python ../cateol.py a
+ $ $PYTHON ../cateol.py a
a<CR><LF>
y<CR><LF>
c<CR><LF>
@@ -136,7 +136,7 @@
$ hg qpush
applying eol.diff
now at: eol.diff
- $ python ../cateol.py a
+ $ $PYTHON ../cateol.py a
a<CR><LF>
y<CR><LF>
c<CR><LF>
--- a/tests/test-mq-missingfiles.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-mq-missingfiles.t Tue Jun 20 16:33:46 2017 -0400
@@ -23,11 +23,11 @@
$ hg init normal
$ cd normal
- $ python ../writelines.py b 10 'a\n'
+ $ $PYTHON ../writelines.py b 10 'a\n'
$ hg ci -Am addb
adding b
$ echo a > a
- $ python ../writelines.py b 2 'b\n' 10 'a\n' 2 'c\n'
+ $ $PYTHON ../writelines.py b 2 'b\n' 10 'a\n' 2 'c\n'
$ echo c > c
$ hg add a c
$ hg qnew -f changeb
@@ -82,7 +82,7 @@
$ hg up -qC 0
$ echo a > a
$ hg mv b bb
- $ python ../writelines.py bb 2 'b\n' 10 'a\n' 2 'c\n'
+ $ $PYTHON ../writelines.py bb 2 'b\n' 10 'a\n' 2 'c\n'
$ echo c > c
$ hg add a c
$ hg qnew changebb
@@ -129,11 +129,11 @@
$ hg init git
$ cd git
- $ python ../writelines.py b 1 '\x00'
+ $ $PYTHON ../writelines.py b 1 '\x00'
$ hg ci -Am addb
adding b
$ echo a > a
- $ python ../writelines.py b 1 '\x01' 1 '\x00'
+ $ $PYTHON ../writelines.py b 1 '\x01' 1 '\x00'
$ echo c > c
$ hg add a c
$ hg qnew -f changeb
--- a/tests/test-mq-qimport.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-mq-qimport.t Tue Jun 20 16:33:46 2017 -0400
@@ -151,10 +151,10 @@
build diff with CRLF
- $ python ../writelines.py b 5 'a\n' 5 'a\r\n'
+ $ $PYTHON ../writelines.py b 5 'a\n' 5 'a\r\n'
$ hg ci -Am addb
adding b
- $ python ../writelines.py b 2 'a\n' 10 'b\n' 2 'a\r\n'
+ $ $PYTHON ../writelines.py b 2 'a\n' 10 'b\n' 2 'a\r\n'
$ hg diff > b.diff
$ hg up -C
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-mq-symlinks.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-mq-symlinks.t Tue Jun 20 16:33:46 2017 -0400
@@ -51,6 +51,7 @@
a
committing manifest
committing changelog
+ updating the branch cache
now at: updatelink
$ readlink.py a
a -> c
--- a/tests/test-mq.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-mq.t Tue Jun 20 16:33:46 2017 -0400
@@ -1137,9 +1137,9 @@
> path = sys.argv[1]
> open(path, 'wb').write('BIN\x00ARY')
> EOF
- $ python writebin.py bucephalus
+ $ $PYTHON writebin.py bucephalus
- $ python "$TESTDIR/md5sum.py" bucephalus
+ $ $PYTHON "$TESTDIR/md5sum.py" bucephalus
8ba2a2f3e77b55d03051ff9c24ad65e7 bucephalus
$ hg add bucephalus
$ hg qnew -f --git addbucephalus
@@ -1158,7 +1158,7 @@
applying addbucephalus
now at: addbucephalus
$ test -f bucephalus
- $ python "$TESTDIR/md5sum.py" bucephalus
+ $ $PYTHON "$TESTDIR/md5sum.py" bucephalus
8ba2a2f3e77b55d03051ff9c24ad65e7 bucephalus
--- a/tests/test-newcgi.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-newcgi.t Tue Jun 20 16:33:46 2017 -0400
@@ -5,7 +5,7 @@
$ hg init test
$ cat >hgweb.cgi <<HGWEB
- > #!/usr/bin/env python
+ > #!$PYTHON
> #
> # An example CGI script to use hgweb, edit as necessary
>
@@ -31,7 +31,7 @@
> HGWEBDIRCONF
$ cat >hgwebdir.cgi <<HGWEBDIR
- > #!/usr/bin/env python
+ > #!$PYTHON
> #
> # An example CGI script to export multiple hgweb repos, edit as necessary
>
--- a/tests/test-newercgi.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-newercgi.t Tue Jun 20 16:33:46 2017 -0400
@@ -5,7 +5,7 @@
$ hg init test
$ cat >hgweb.cgi <<HGWEB
- > #!/usr/bin/env python
+ > #!$PYTHON
> #
> # An example CGI script to use hgweb, edit as necessary
>
@@ -28,7 +28,7 @@
> HGWEBDIRCONF
$ cat >hgwebdir.cgi <<HGWEBDIR
- > #!/usr/bin/env python
+ > #!$PYTHON
> #
> # An example CGI script to export multiple hgweb repos, edit as necessary
>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-obsmarker-template.t Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,888 @@
+This test file tests the various templates related to obsmarkers.
+
+Global setup
+============
+
+ $ . $TESTDIR/testlib/obsmarker-common.sh
+ $ cat >> $HGRCPATH <<EOF
+ > [ui]
+ > interactive = true
+ > [phases]
+ > publish=False
+ > [experimental]
+ > evolution=all
+ > [alias]
+ > tlog = log -G -T '{node|short}\
+ > {if(predecessors, "\n Predecessors: {predecessors}")}\
+ > {if(predecessors, "\n semi-colon: {join(predecessors, "; ")}")}\
+ > {if(predecessors, "\n json: {predecessors|json}")}\
+ > {if(predecessors, "\n map: {join(predecessors % "{rev}:{node}", " ")}")}\n'
+ > EOF
+
+Test templates on amended commit
+================================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/templates-local-amend
+ $ cd $TESTTMP/templates-local-amend
+ $ mkcommit ROOT
+ $ mkcommit A0
+ $ echo 42 >> A0
+ $ hg commit --amend -m "A1"
+ $ hg commit --amend -m "A2"
+
+ $ hg log --hidden -G
+ @ changeset: 4:d004c8f274b9
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A2
+ |
+ | x changeset: 3:a468dc9b3633
+ |/ parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A1
+ |
+ | x changeset: 2:f137d23bb3e1
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | summary: temporary amend commit for 471f378eab4c
+ | |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+Check templates
+---------------
+ $ hg up 'desc(A0)' --hidden
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should show current revision as it is the working copy
+ $ hg tlog
+ o d004c8f274b9
+ | Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | @ 471f378eab4c
+ |/
+ o ea207398892e
+
+ $ hg up 'desc(A1)' --hidden
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should show current revision as it is the working copy
+ $ hg tlog
+ o d004c8f274b9
+ | Predecessors: 3:a468dc9b3633
+ | semi-colon: 3:a468dc9b3633
+ | json: ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]
+ | map: 3:a468dc9b36338b14fdb7825f55ce3df4e71517ad
+ | @ a468dc9b3633
+ |/
+ o ea207398892e
+
+Predecessors template should show all the predecessors as we force their display
+with --hidden
+ $ hg tlog --hidden
+ o d004c8f274b9
+ | Predecessors: 3:a468dc9b3633
+ | semi-colon: 3:a468dc9b3633
+ | json: ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]
+ | map: 3:a468dc9b36338b14fdb7825f55ce3df4e71517ad
+ | @ a468dc9b3633
+ |/ Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | x f137d23bb3e1
+ | |
+ | x 471f378eab4c
+ |/
+ o ea207398892e
+
+
+Predecessors template shouldn't show anything as all obsolete commits are not
+visible.
+ $ hg up 'desc(A2)'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg tlog
+ @ d004c8f274b9
+ |
+ o ea207398892e
+
+ $ hg tlog --hidden
+ @ d004c8f274b9
+ | Predecessors: 3:a468dc9b3633
+ | semi-colon: 3:a468dc9b3633
+ | json: ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]
+ | map: 3:a468dc9b36338b14fdb7825f55ce3df4e71517ad
+ | x a468dc9b3633
+ |/ Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | x f137d23bb3e1
+ | |
+ | x 471f378eab4c
+ |/
+ o ea207398892e
+
+
+Test templates with a split commit
+==================================
+
+ $ hg init $TESTTMP/templates-local-split
+ $ cd $TESTTMP/templates-local-split
+ $ mkcommit ROOT
+ $ echo 42 >> a
+ $ echo 43 >> b
+ $ hg commit -A -m "A0"
+ adding a
+ adding b
+ $ hg log --hidden -G
+ @ changeset: 1:471597cad322
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+# Simulate split
+ $ hg up -r "desc(ROOT)"
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ echo 42 >> a
+ $ hg commit -A -m "A0"
+ adding a
+ created new head
+ $ echo 43 >> b
+ $ hg commit -A -m "A0"
+ adding b
+ $ hg debugobsolete `getid "1"` `getid "2"` `getid "3"`
+
+ $ hg log --hidden -G
+ @ changeset: 3:f257fde29c7a
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 2:337fec4d2edc
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ | x changeset: 1:471597cad322
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+Check templates
+---------------
+
+ $ hg up 'obsolete()' --hidden
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should show current revision as it is the working copy
+ $ hg tlog
+ o f257fde29c7a
+ | Predecessors: 1:471597cad322
+ | semi-colon: 1:471597cad322
+ | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
+ | map: 1:471597cad322d1f659bb169751be9133dad92ef3
+ o 337fec4d2edc
+ | Predecessors: 1:471597cad322
+ | semi-colon: 1:471597cad322
+ | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
+ | map: 1:471597cad322d1f659bb169751be9133dad92ef3
+ | @ 471597cad322
+ |/
+ o ea207398892e
+
+ $ hg up f257fde29c7a
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should not show a predecessor as it's not displayed in
+the log
+ $ hg tlog
+ @ f257fde29c7a
+ |
+ o 337fec4d2edc
+ |
+ o ea207398892e
+
+Predecessors template should show both predecessors as we force their display
+with --hidden
+ $ hg tlog --hidden
+ @ f257fde29c7a
+ | Predecessors: 1:471597cad322
+ | semi-colon: 1:471597cad322
+ | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
+ | map: 1:471597cad322d1f659bb169751be9133dad92ef3
+ o 337fec4d2edc
+ | Predecessors: 1:471597cad322
+ | semi-colon: 1:471597cad322
+ | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
+ | map: 1:471597cad322d1f659bb169751be9133dad92ef3
+ | x 471597cad322
+ |/
+ o ea207398892e
+
+Test templates with folded commit
+=================================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/templates-local-fold
+ $ cd $TESTTMP/templates-local-fold
+ $ mkcommit ROOT
+ $ mkcommit A0
+ $ mkcommit B0
+ $ hg log --hidden -G
+ @ changeset: 2:0dec01379d3b
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: B0
+ |
+ o changeset: 1:471f378eab4c
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+Simulate a fold
+ $ hg up -r "desc(ROOT)"
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ echo "A0" > A0
+ $ echo "B0" > B0
+ $ hg commit -A -m "C0"
+ adding A0
+ adding B0
+ created new head
+ $ hg debugobsolete `getid "desc(A0)"` `getid "desc(C0)"`
+ $ hg debugobsolete `getid "desc(B0)"` `getid "desc(C0)"`
+
+ $ hg log --hidden -G
+ @ changeset: 3:eb5a0daa2192
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: C0
+ |
+ | x changeset: 2:0dec01379d3b
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | summary: B0
+ | |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+Check templates
+---------------
+
+ $ hg up 'desc(A0)' --hidden
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+
+Predecessors template should show current revision as it is the working copy
+ $ hg tlog
+ o eb5a0daa2192
+ | Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | @ 471f378eab4c
+ |/
+ o ea207398892e
+
+ $ hg up 'desc(B0)' --hidden
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should show both predecessors as they should be both
+displayed
+ $ hg tlog
+ o eb5a0daa2192
+ | Predecessors: 2:0dec01379d3b 1:471f378eab4c
+ | semi-colon: 2:0dec01379d3b; 1:471f378eab4c
+ | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", "471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | @ 0dec01379d3b
+ | |
+ | x 471f378eab4c
+ |/
+ o ea207398892e
+
+ $ hg up 'desc(C0)'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should not show predecessors as they are not displayed in
+the log
+ $ hg tlog
+ @ eb5a0daa2192
+ |
+ o ea207398892e
+
+Predecessors template should show both predecessors as we force their display
+with --hidden
+ $ hg tlog --hidden
+ @ eb5a0daa2192
+ | Predecessors: 2:0dec01379d3b 1:471f378eab4c
+ | semi-colon: 2:0dec01379d3b; 1:471f378eab4c
+ | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", "471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | x 0dec01379d3b
+ | |
+ | x 471f378eab4c
+ |/
+ o ea207398892e
+
+
+Test templates with divergence
+==============================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/templates-local-divergence
+ $ cd $TESTTMP/templates-local-divergence
+ $ mkcommit ROOT
+ $ mkcommit A0
+ $ hg commit --amend -m "A1"
+ $ hg log --hidden -G
+ @ changeset: 2:fdf9bde5129a
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A1
+ |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+ $ hg update --hidden 'desc(A0)'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg commit --amend -m "A2"
+ $ hg log --hidden -G
+ @ changeset: 3:65b757b745b9
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | trouble: divergent
+ | summary: A2
+ |
+ | o changeset: 2:fdf9bde5129a
+ |/ parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | trouble: divergent
+ | summary: A1
+ |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+ $ hg commit --amend -m 'A3'
+ $ hg log --hidden -G
+ @ changeset: 4:019fadeab383
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | trouble: divergent
+ | summary: A3
+ |
+ | x changeset: 3:65b757b745b9
+ |/ parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A2
+ |
+ | o changeset: 2:fdf9bde5129a
+ |/ parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | trouble: divergent
+ | summary: A1
+ |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+
+Check templates
+---------------
+
+ $ hg up 'desc(A0)' --hidden
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should show current revision as it is the working copy
+ $ hg tlog
+ o 019fadeab383
+ | Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | o fdf9bde5129a
+ |/ Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | @ 471f378eab4c
+ |/
+ o ea207398892e
+
+ $ hg up 'desc(A1)'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should not show predecessors as they are not displayed in
+the log
+ $ hg tlog
+ o 019fadeab383
+ |
+ | @ fdf9bde5129a
+ |/
+ o ea207398892e
+
+Predecessors template should show the predecessors as we force their display
+with --hidden
+ $ hg tlog --hidden
+ o 019fadeab383
+ | Predecessors: 3:65b757b745b9
+ | semi-colon: 3:65b757b745b9
+ | json: ["65b757b745b935093c87a2bccd877521cccffcbd"]
+ | map: 3:65b757b745b935093c87a2bccd877521cccffcbd
+ | x 65b757b745b9
+ |/ Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | @ fdf9bde5129a
+ |/ Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | x 471f378eab4c
+ |/
+ o ea207398892e
+
+
+Test templates with amended + folded commit
+===========================================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/templates-local-amend-fold
+ $ cd $TESTTMP/templates-local-amend-fold
+ $ mkcommit ROOT
+ $ mkcommit A0
+ $ mkcommit B0
+ $ hg commit --amend -m "B1"
+ $ hg log --hidden -G
+ @ changeset: 3:b7ea6d14e664
+ | tag: tip
+ | parent: 1:471f378eab4c
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: B1
+ |
+ | x changeset: 2:0dec01379d3b
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: B0
+ |
+ o changeset: 1:471f378eab4c
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+# Simulate a fold
+ $ hg up -r "desc(ROOT)"
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ echo "A0" > A0
+ $ echo "B0" > B0
+ $ hg commit -A -m "C0"
+ adding A0
+ adding B0
+ created new head
+ $ hg debugobsolete `getid "desc(A0)"` `getid "desc(C0)"`
+ $ hg debugobsolete `getid "desc(B1)"` `getid "desc(C0)"`
+
+ $ hg log --hidden -G
+ @ changeset: 4:eb5a0daa2192
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: C0
+ |
+ | x changeset: 3:b7ea6d14e664
+ | | parent: 1:471f378eab4c
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | summary: B1
+ | |
+ | | x changeset: 2:0dec01379d3b
+ | |/ user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | summary: B0
+ | |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+Check templates
+---------------
+
+ $ hg up 'desc(A0)' --hidden
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+
+Predecessors template should show current revision as it is the working copy
+ $ hg tlog
+ o eb5a0daa2192
+ | Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | @ 471f378eab4c
+ |/
+ o ea207398892e
+
+ $ hg up 'desc(B0)' --hidden
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should show both predecessors as they are visible
+ $ hg tlog
+ o eb5a0daa2192
+ | Predecessors: 2:0dec01379d3b 1:471f378eab4c
+ | semi-colon: 2:0dec01379d3b; 1:471f378eab4c
+ | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", "471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | @ 0dec01379d3b
+ | |
+ | x 471f378eab4c
+ |/
+ o ea207398892e
+
+ $ hg up 'desc(B1)' --hidden
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should show both predecessors as they are visible
+ $ hg tlog
+ o eb5a0daa2192
+ | Predecessors: 1:471f378eab4c 3:b7ea6d14e664
+ | semi-colon: 1:471f378eab4c; 3:b7ea6d14e664
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874", "b7ea6d14e664bdc8922221f7992631b50da3fb07"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874 3:b7ea6d14e664bdc8922221f7992631b50da3fb07
+ | @ b7ea6d14e664
+ | |
+ | x 471f378eab4c
+ |/
+ o ea207398892e
+
+ $ hg up 'desc(C0)'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should show no predecessors as they are both not visible
+ $ hg tlog
+ @ eb5a0daa2192
+ |
+ o ea207398892e
+
+Predecessors template should show all predecessors as we force their display
+with --hidden
+ $ hg tlog --hidden
+ @ eb5a0daa2192
+ | Predecessors: 1:471f378eab4c 3:b7ea6d14e664
+ | semi-colon: 1:471f378eab4c; 3:b7ea6d14e664
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874", "b7ea6d14e664bdc8922221f7992631b50da3fb07"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874 3:b7ea6d14e664bdc8922221f7992631b50da3fb07
+ | x b7ea6d14e664
+ | | Predecessors: 2:0dec01379d3b
+ | | semi-colon: 2:0dec01379d3b
+ | | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
+ | | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
+ | | x 0dec01379d3b
+ | |/
+ | x 471f378eab4c
+ |/
+ o ea207398892e
+
+
+Test template with pushed and pulled obs markers
+================================================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/templates-local-remote-markers-1
+ $ cd $TESTTMP/templates-local-remote-markers-1
+ $ mkcommit ROOT
+ $ mkcommit A0
+ $ hg clone $TESTTMP/templates-local-remote-markers-1 $TESTTMP/templates-local-remote-markers-2
+ updating to branch default
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cd $TESTTMP/templates-local-remote-markers-2
+ $ hg log --hidden -G
+ @ changeset: 1:471f378eab4c
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+ $ cd $TESTTMP/templates-local-remote-markers-1
+ $ hg commit --amend -m "A1"
+ $ hg commit --amend -m "A2"
+ $ hg log --hidden -G
+ @ changeset: 3:7a230b46bf61
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A2
+ |
+ | x changeset: 2:fdf9bde5129a
+ |/ parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A1
+ |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+ $ cd $TESTTMP/templates-local-remote-markers-2
+ $ hg pull
+ pulling from $TESTTMP/templates-local-remote-markers-1 (glob)
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 0 changes to 1 files (+1 heads)
+ 2 new obsolescence markers
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ $ hg log --hidden -G
+ o changeset: 2:7a230b46bf61
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A2
+ |
+ | @ changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+
+ $ hg debugobsolete
+ 471f378eab4c5e25f6c77f785b27c936efb22874 fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 7a230b46bf61e50b30308c6cfd7bd1269ef54702 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+Check templates
+---------------
+
+Predecessors template should show current revision as it is the working copy
+ $ hg tlog
+ o 7a230b46bf61
+ | Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | @ 471f378eab4c
+ |/
+ o ea207398892e
+
+ $ hg up 'desc(A2)'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should show no predecessors as they are not visible
+ $ hg tlog
+ @ 7a230b46bf61
+ |
+ o ea207398892e
+
+Predecessors template should show all predecessors as we force their display
+with --hidden
+ $ hg tlog --hidden
+ @ 7a230b46bf61
+ | Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | x 471f378eab4c
+ |/
+ o ea207398892e
+
+
+Test template with obsmarkers cycle
+===================================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/templates-local-cycle
+ $ cd $TESTTMP/templates-local-cycle
+ $ mkcommit ROOT
+ $ mkcommit A0
+ $ mkcommit B0
+ $ hg up -r 0
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ mkcommit C0
+ created new head
+
+Create the cycle
+
+ $ hg debugobsolete `getid "desc(A0)"` `getid "desc(B0)"`
+ $ hg debugobsolete `getid "desc(B0)"` `getid "desc(C0)"`
+ $ hg debugobsolete `getid "desc(B0)"` `getid "desc(A0)"`
+
+Check templates
+---------------
+
+ $ hg tlog
+ @ f897c6137566
+ |
+ o ea207398892e
+
+
+ $ hg up -r "desc(B0)" --hidden
+ 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg tlog
+ o f897c6137566
+ | Predecessors: 2:0dec01379d3b
+ | semi-colon: 2:0dec01379d3b
+ | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
+ | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
+ | @ 0dec01379d3b
+ | | Predecessors: 1:471f378eab4c
+ | | semi-colon: 1:471f378eab4c
+ | | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | x 471f378eab4c
+ |/ Predecessors: 2:0dec01379d3b
+ | semi-colon: 2:0dec01379d3b
+ | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
+ | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
+ o ea207398892e
+
+
+ $ hg up -r "desc(A0)" --hidden
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg tlog
+ o f897c6137566
+ | Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | @ 471f378eab4c
+ |/
+ o ea207398892e
+
+
+ $ hg up -r "desc(ROOT)" --hidden
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg tlog
+ o f897c6137566
+ |
+ @ ea207398892e
+
+
+ $ hg tlog --hidden
+ o f897c6137566
+ | Predecessors: 2:0dec01379d3b
+ | semi-colon: 2:0dec01379d3b
+ | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
+ | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
+ | x 0dec01379d3b
+ | | Predecessors: 1:471f378eab4c
+ | | semi-colon: 1:471f378eab4c
+ | | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | x 471f378eab4c
+ |/ Predecessors: 2:0dec01379d3b
+ | semi-colon: 2:0dec01379d3b
+ | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
+ | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
+ @ ea207398892e
+
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-obsolete-bundle-strip.t Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,1366 @@
+==================================================
+Test obsmarkers interaction with bundle and strip
+==================================================
+
+Setup a repository with various cases
+====================================
+
+Config setup
+------------
+
+ $ cat >> $HGRCPATH <<EOF
+ > [ui]
+ > # simpler log output
+ > logtemplate = "{node|short}: {desc}\n"
+ >
+ > [experimental]
+ > # enable evolution
+ > evolution = all
+ >
+ > # include obsmarkers in bundle
+ > evolution.bundle-obsmarker = yes
+ >
+ > [extensions]
+ > # needed for some tests
+ > strip =
+ > [defaults]
 > # we'll query many hidden changesets
+ > debugobsolete = --hidden
+ > EOF
+
+ $ mkcommit() {
+ > echo "$1" > "$1"
+ > hg add "$1"
+ > hg ci -m "$1"
+ > }
+
+ $ getid() {
+ > hg log --hidden --template '{node}\n' --rev "$1"
+ > }
+
+ $ mktestrepo () {
+ > [ -n "$1" ] || exit 1
+ > cd $TESTTMP
+ > hg init $1
+ > cd $1
+ > mkcommit ROOT
+ > }
+
+Function to compare the expected bundled obsmarkers with the actually bundled
+obsmarkers. It also checks the obsmarkers backed up during strip.
+
+ $ testrevs () {
+ > revs="$1"
+ > testname=`basename \`pwd\``
+ > revsname=`hg --hidden log -T '-{desc}' --rev "${revs}"`
+ > prefix="${TESTTMP}/${testname}${revsname}"
+ > markersfile="${prefix}-relevant-markers.txt"
+ > exclufile="${prefix}-exclusive-markers.txt"
+ > bundlefile="${prefix}-bundle.hg"
+ > contentfile="${prefix}-bundle-markers.hg"
+ > stripcontentfile="${prefix}-bundle-markers.hg"
+ > hg debugobsolete --hidden --rev "${revs}" | sed 's/^/ /' > "${markersfile}"
+ > hg debugobsolete --hidden --rev "${revs}" --exclusive | sed 's/^/ /' > "${exclufile}"
+ > echo '### Matched revisions###'
+ > hg log --hidden --rev "${revs}" | sort
+ > echo '### Relevant markers ###'
+ > cat "${markersfile}"
+ > printf "# bundling: "
+ > hg bundle --hidden --base "parents(roots(${revs}))" --rev "${revs}" "${bundlefile}"
+ > hg debugbundle --part-type obsmarkers "${bundlefile}" | sed 1,3d > "${contentfile}"
+ > echo '### Bundled markers ###'
+ > cat "${contentfile}"
+ > echo '### diff <relevant> <bundled> ###'
+ > cmp "${markersfile}" "${contentfile}" || diff -u "${markersfile}" "${contentfile}"
+ > echo '#################################'
+ > echo '### Exclusive markers ###'
+ > cat "${exclufile}"
+ > # if the matched revs do not have children, we also check the result of strip
+ > children=`hg log --hidden --rev "((${revs})::) - (${revs})"`
+ > if [ -z "$children" ];
+ > then
+ > printf "# stripping: "
+ > prestripfile="${prefix}-pre-strip.txt"
+ > poststripfile="${prefix}-post-strip.txt"
+ > strippedfile="${prefix}-stripped-markers.txt"
+ > hg debugobsolete --hidden | sort | sed 's/^/ /' > "${prestripfile}"
+ > hg strip --hidden --rev "${revs}"
+ > hg debugobsolete --hidden | sort | sed 's/^/ /' > "${poststripfile}"
+ > hg debugbundle --part-type obsmarkers .hg/strip-backup/* | sed 1,3d > "${stripcontentfile}"
+ > echo '### Backup markers ###'
+ > cat "${stripcontentfile}"
+ > echo '### diff <relevant> <backed-up> ###'
+ > cmp "${markersfile}" "${stripcontentfile}" || diff -u "${markersfile}" "${stripcontentfile}"
+ > echo '#################################'
+ > cat "${prestripfile}" "${poststripfile}" | sort | uniq -u > "${strippedfile}"
+ > echo '### Stripped markers ###'
+ > cat "${strippedfile}"
+ > echo '### diff <exclusive> <stripped> ###'
+ > cmp "${exclufile}" "${strippedfile}" || diff -u "${exclufile}" "${strippedfile}"
+ > echo '#################################'
+ > # restore and clean up repo for the next test
+ > hg unbundle .hg/strip-backup/* | sed 's/^/# unbundling: /'
+ > # clean up directory for the next test
+ > rm .hg/strip-backup/*
+ > fi
+ > }
+
+root setup
+-------------
+
+simple chain
+============
+
+. A0
+. ⇠ø⇠◔ A1
+. |/
+. ●
+
+setup
+-----
+
+ $ mktestrepo simple-chain
+ $ mkcommit 'C-A0'
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ mkcommit 'C-A1'
+ created new head
+ $ hg debugobsolete a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 `getid 'desc("C-A0")'`
+ $ hg debugobsolete `getid 'desc("C-A0")'` a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1
+ $ hg debugobsolete a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 `getid 'desc("C-A1")'`
+
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg log --hidden -G
+ o cf2c22470d67: C-A1
+ |
+ | x 84fcb0dfe17b: C-A0
+ |/
+ @ ea207398892e: ROOT
+
+ $ hg debugobsolete
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+Actual testing
+--------------
+
+ $ testrevs 'desc("C-A0")'
+ ### Matched revisions###
+ 84fcb0dfe17b: C-A0
+ ### Relevant markers ###
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ # stripping: saved backup bundle to $TESTTMP/simple-chain/.hg/strip-backup/84fcb0dfe17b-6454bbdc-backup.hg (glob)
+ ### Backup markers ###
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: (run 'hg heads' to see heads)
+
+ $ testrevs 'desc("C-A1")'
+ ### Matched revisions###
+ cf2c22470d67: C-A1
+ ### Relevant markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/simple-chain/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg (glob)
+ ### Backup markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: 2 new obsolescence markers
+ # unbundling: (run 'hg heads' to see heads)
+
+ $ testrevs 'desc("C-A")'
+ ### Matched revisions###
+ 84fcb0dfe17b: C-A0
+ cf2c22470d67: C-A1
+ ### Relevant markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 2 changesets found
+ ### Bundled markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/simple-chain/.hg/strip-backup/cf2c22470d67-fce4fc64-backup.hg (glob)
+ ### Backup markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 2 changesets with 2 changes to 2 files (+1 heads)
+ # unbundling: 3 new obsolescence markers
+ # unbundling: (run 'hg heads' to see heads)
+
+chain with prune children
+=========================
+
+. ⇠⊗ B0
+. |
+. ⇠ø⇠◔ A1
+. |
+. ●
+
+setup
+-----
+
+ $ mktestrepo prune
+ $ mkcommit 'C-A0'
+ $ mkcommit 'C-B0'
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ mkcommit 'C-A1'
+ created new head
+ $ hg debugobsolete a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 `getid 'desc("C-A0")'`
+ $ hg debugobsolete `getid 'desc("C-A0")'` `getid 'desc("C-A1")'`
+ $ hg debugobsolete --record-parents `getid 'desc("C-B0")'`
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg log --hidden -G
+ o cf2c22470d67: C-A1
+ |
+ | x 29f93b1df87b: C-B0
+ | |
+ | x 84fcb0dfe17b: C-A0
+ |/
+ @ ea207398892e: ROOT
+
+ $ hg debugobsolete
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+Actual testing
+--------------
+
+ $ testrevs 'desc("C-A0")'
+ ### Matched revisions###
+ 84fcb0dfe17b: C-A0
+ ### Relevant markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+
+(The stripped markers are considered exclusive to the pruned changeset even if
+they are also considered "relevant" to its parent. This allows stripping prune
+markers. This avoids leaving prune markers from dead-ends that could be
+problematic)
+
+ $ testrevs 'desc("C-B0")'
+ ### Matched revisions###
+ 29f93b1df87b: C-B0
+ ### Relevant markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/prune/.hg/strip-backup/29f93b1df87b-7fb32101-backup.hg (glob)
+ ### Backup markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files
+ # unbundling: 1 new obsolescence markers
+ # unbundling: (run 'hg update' to get a working copy)
+
+ $ testrevs 'desc("C-A1")'
+ ### Matched revisions###
+ cf2c22470d67: C-A1
+ ### Relevant markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/prune/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg (glob)
+ ### Backup markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: 1 new obsolescence markers
+ # unbundling: (run 'hg heads' to see heads)
+
+bundling multiple revisions
+
+ $ testrevs 'desc("C-A")'
+ ### Matched revisions###
+ 84fcb0dfe17b: C-A0
+ cf2c22470d67: C-A1
+ ### Relevant markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 2 changesets found
+ ### Bundled markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+ $ testrevs 'desc("C-")'
+ ### Matched revisions###
+ 29f93b1df87b: C-B0
+ 84fcb0dfe17b: C-A0
+ cf2c22470d67: C-A1
+ ### Relevant markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 3 changesets found
+ ### Bundled markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/prune/.hg/strip-backup/cf2c22470d67-884c33b0-backup.hg (glob)
+ ### Backup markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 3 changesets with 3 changes to 3 files (+1 heads)
+ # unbundling: 3 new obsolescence markers
+ # unbundling: (run 'hg heads' to see heads)
+
+chain with precursors also pruned
+=================================
+
+. A0 (also pruned)
+. ⇠ø⇠◔ A1
+. |
+. ●
+
+setup
+-----
+
+ $ mktestrepo prune-inline
+ $ mkcommit 'C-A0'
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ mkcommit 'C-A1'
+ created new head
+ $ hg debugobsolete a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 `getid 'desc("C-A0")'`
+ $ hg debugobsolete --record-parents `getid 'desc("C-A0")'`
+ $ hg debugobsolete `getid 'desc("C-A0")'` `getid 'desc("C-A1")'`
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg log --hidden -G
+ o cf2c22470d67: C-A1
+ |
+ | x 84fcb0dfe17b: C-A0
+ |/
+ @ ea207398892e: ROOT
+
+ $ hg debugobsolete
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+Actual testing
+--------------
+
+ $ testrevs 'desc("C-A0")'
+ ### Matched revisions###
+ 84fcb0dfe17b: C-A0
+ ### Relevant markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ # stripping: saved backup bundle to $TESTTMP/prune-inline/.hg/strip-backup/84fcb0dfe17b-6454bbdc-backup.hg (glob)
+ ### Backup markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: (run 'hg heads' to see heads)
+
+ $ testrevs 'desc("C-A1")'
+ ### Matched revisions###
+ cf2c22470d67: C-A1
+ ### Relevant markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/prune-inline/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg (glob)
+ ### Backup markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: 1 new obsolescence markers
+ # unbundling: (run 'hg heads' to see heads)
+
+ $ testrevs 'desc("C-A")'
+ ### Matched revisions###
+ 84fcb0dfe17b: C-A0
+ cf2c22470d67: C-A1
+ ### Relevant markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 2 changesets found
+ ### Bundled markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/prune-inline/.hg/strip-backup/cf2c22470d67-fce4fc64-backup.hg (glob)
+ ### Backup markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 2 changesets with 2 changes to 2 files (+1 heads)
+ # unbundling: 3 new obsolescence markers
+ # unbundling: (run 'hg heads' to see heads)
+
+chain with missing prune
+========================
+
+. ⊗ B
+. |
+. ⇠◌⇠◔ A1
+. |
+. ●
+
+setup
+-----
+
+ $ mktestrepo missing-prune
+ $ mkcommit 'C-A0'
+ $ mkcommit 'C-B0'
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ mkcommit 'C-A1'
+ created new head
+ $ hg debugobsolete a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 `getid 'desc("C-A0")'`
+ $ hg debugobsolete `getid 'desc("C-A0")'` `getid 'desc("C-A1")'`
+ $ hg debugobsolete --record-parents `getid 'desc("C-B0")'`
+
+(it is awkward to create a prune marker with parent data but without the changeset itself, so we strip the changeset after the fact)
+
+ $ hg strip --hidden --rev 'desc("C-A0")::' --no-backup --config devel.strip-obsmarkers=no
+
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg log --hidden -G
+ o cf2c22470d67: C-A1
+ |
+ @ ea207398892e: ROOT
+
+ $ hg debugobsolete
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+Actual testing
+--------------
+
+ $ testrevs 'desc("C-A1")'
+ ### Matched revisions###
+ cf2c22470d67: C-A1
+ ### Relevant markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/missing-prune/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg (glob)
+ ### Backup markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files
+ # unbundling: 3 new obsolescence markers
+ # unbundling: (run 'hg update' to get a working copy)
+
+chain with precursors also pruned and missing
+=============================================
+
+. A0 (also pruned)
+. ⇠◌⇠◔ A1
+. |
+. ●
+
+setup
+-----
+
+ $ mktestrepo prune-inline-missing
+ $ mkcommit 'C-A0'
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ mkcommit 'C-A1'
+ created new head
+ $ hg debugobsolete a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 `getid 'desc("C-A0")'`
+ $ hg debugobsolete --record-parents `getid 'desc("C-A0")'`
+ $ hg debugobsolete `getid 'desc("C-A0")'` `getid 'desc("C-A1")'`
+
+(it is awkward to create a prune marker with parent data but without the changeset itself, so we strip the changeset after the fact)
+
+ $ hg strip --hidden --rev 'desc("C-A0")::' --no-backup --config devel.strip-obsmarkers=no
+
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg log --hidden -G
+ o cf2c22470d67: C-A1
+ |
+ @ ea207398892e: ROOT
+
+ $ hg debugobsolete
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+Actual testing
+--------------
+
+ $ testrevs 'desc("C-A1")'
+ ### Matched revisions###
+ cf2c22470d67: C-A1
+ ### Relevant markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/prune-inline-missing/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg (glob)
+ ### Backup markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files
+ # unbundling: 3 new obsolescence markers
+ # unbundling: (run 'hg update' to get a working copy)
+
+Chain with fold and split
+=========================
+
+setup
+-----
+
+ $ mktestrepo split-fold
+ $ mkcommit 'C-A'
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ mkcommit 'C-B'
+ created new head
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ mkcommit 'C-C'
+ created new head
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ mkcommit 'C-D'
+ created new head
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ mkcommit 'C-E'
+ created new head
+ $ hg debugobsolete a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 `getid 'desc("C-A")'`
+ $ hg debugobsolete `getid 'desc("C-A")'` `getid 'desc("C-B")'` `getid 'desc("C-C")'` # record split
+ $ hg debugobsolete `getid 'desc("C-A")'` `getid 'desc("C-D")'` # other divergent
+ $ hg debugobsolete `getid 'desc("C-A")'` b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0
+ $ hg debugobsolete b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 `getid 'desc("C-E")'`
+ $ hg debugobsolete `getid 'desc("C-B")'` `getid 'desc("C-E")'`
+ $ hg debugobsolete `getid 'desc("C-C")'` `getid 'desc("C-E")'`
+ $ hg debugobsolete `getid 'desc("C-D")'` `getid 'desc("C-E")'`
+ $ hg debugobsolete c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 `getid 'desc("C-E")'`
+
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg log --hidden -G
+ o 2f20ff6509f0: C-E
+ |
+ | x 06dc9da25ef0: C-D
+ |/
+ | x 27ec657ca21d: C-C
+ |/
+ | x a9b9da38ed96: C-B
+ |/
+ | x 9ac430e15fca: C-A
+ |/
+ @ ea207398892e: ROOT
+
+ $ hg debugobsolete
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+Actual testing
+--------------
+
+ $ testrevs 'desc("C-A")'
+ ### Matched revisions###
+ 9ac430e15fca: C-A
+ ### Relevant markers ###
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/9ac430e15fca-81204eba-backup.hg (glob)
+ ### Backup markers ###
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: (run 'hg heads' to see heads)
+
+ $ testrevs 'desc("C-B")'
+ ### Matched revisions###
+ a9b9da38ed96: C-B
+ ### Relevant markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/a9b9da38ed96-7465d6e9-backup.hg (glob)
+ ### Backup markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: (run 'hg heads' to see heads)
+
+ $ testrevs 'desc("C-C")'
+ ### Matched revisions###
+ 27ec657ca21d: C-C
+ ### Relevant markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/27ec657ca21d-d5dd1c7c-backup.hg (glob)
+ ### Backup markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: (run 'hg heads' to see heads)
+
+ $ testrevs 'desc("C-D")'
+ ### Matched revisions###
+ 06dc9da25ef0: C-D
+ ### Relevant markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/06dc9da25ef0-9b1c0a91-backup.hg (glob)
+ ### Backup markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: (run 'hg heads' to see heads)
+
+ $ testrevs 'desc("C-E")'
+ ### Matched revisions###
+ 2f20ff6509f0: C-E
+ ### Relevant markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/2f20ff6509f0-8adeb22d-backup.hg (glob)
+ ### Backup markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: 6 new obsolescence markers
+ # unbundling: (run 'hg heads' to see heads)
+
+Bundle multiple revisions
+
+* each part of the split
+
+ $ testrevs 'desc("C-B") + desc("C-C")'
+ ### Matched revisions###
+ 27ec657ca21d: C-C
+ a9b9da38ed96: C-B
+ ### Relevant markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 2 changesets found
+ ### Bundled markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/a9b9da38ed96-0daf625a-backup.hg (glob)
+ ### Backup markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 2 changesets with 2 changes to 2 files (+2 heads)
+ # unbundling: (run 'hg heads' to see heads)
+
+* top one and other divergent
+
+ $ testrevs 'desc("C-E") + desc("C-D")'
+ ### Matched revisions###
+ 06dc9da25ef0: C-D
+ 2f20ff6509f0: C-E
+ ### Relevant markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 2 changesets found
+ ### Bundled markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/2f20ff6509f0-bf1b80f4-backup.hg (glob)
+ ### Backup markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 2 changesets with 2 changes to 2 files (+2 heads)
+ # unbundling: 7 new obsolescence markers
+ # unbundling: (run 'hg heads' to see heads)
+
+* top one and initial precursors
+
+ $ testrevs 'desc("C-E") + desc("C-A")'
+ ### Matched revisions###
+ 2f20ff6509f0: C-E
+ 9ac430e15fca: C-A
+ ### Relevant markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 2 changesets found
+ ### Bundled markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/9ac430e15fca-36b6476a-backup.hg (glob)
+ ### Backup markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 2 changesets with 2 changes to 2 files (+2 heads)
+ # unbundling: 6 new obsolescence markers
+ # unbundling: (run 'hg heads' to see heads)
+
+* top one and one of the split
+
+ $ testrevs 'desc("C-E") + desc("C-C")'
+ ### Matched revisions###
+ 27ec657ca21d: C-C
+ 2f20ff6509f0: C-E
+ ### Relevant markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 2 changesets found
+ ### Bundled markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/2f20ff6509f0-5fdfcd7d-backup.hg (glob)
+ ### Backup markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 2 changesets with 2 changes to 2 files (+2 heads)
+ # unbundling: 7 new obsolescence markers
+ # unbundling: (run 'hg heads' to see heads)
+
+* all
+
+ $ testrevs 'desc("C-")'
+ ### Matched revisions###
+ 06dc9da25ef0: C-D
+ 27ec657ca21d: C-C
+ 2f20ff6509f0: C-E
+ 9ac430e15fca: C-A
+ a9b9da38ed96: C-B
+ ### Relevant markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 5 changesets found
+ ### Bundled markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/a9b9da38ed96-eeb4258f-backup.hg (glob)
+ ### Backup markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 5 changesets with 5 changes to 5 files (+4 heads)
+ # unbundling: 9 new obsolescence markers
+ # unbundling: (run 'hg heads' to see heads)
+
+changeset pruned on its own
+===========================
+
+. ⊗ B
+. |
+. ◕ A
+. |
+. ●
+
+setup
+-----
+
+ $ mktestrepo lonely-prune
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ mkcommit 'C-A'
+ $ mkcommit 'C-B'
+ $ hg debugobsolete --record-parent `getid 'desc("C-B")'`
+
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ hg log --hidden -G
+ x cefb651fc2fd: C-B
+ |
+ o 9ac430e15fca: C-A
+ |
+ @ ea207398892e: ROOT
+
+ $ hg debugobsolete
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+Actual testing
+--------------
+ $ testrevs 'desc("C-A")'
+ ### Matched revisions###
+ 9ac430e15fca: C-A
+ ### Relevant markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ $ testrevs 'desc("C-B")'
+ ### Matched revisions###
+ cefb651fc2fd: C-B
+ ### Relevant markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/lonely-prune/.hg/strip-backup/cefb651fc2fd-345c8dfa-backup.hg (glob)
+ ### Backup markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files
+ # unbundling: 1 new obsolescence markers
+ # unbundling: (run 'hg update' to get a working copy)
+ $ testrevs 'desc("C-")'
+ ### Matched revisions###
+ 9ac430e15fca: C-A
+ cefb651fc2fd: C-B
+ ### Relevant markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 2 changesets found
+ ### Bundled markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/lonely-prune/.hg/strip-backup/9ac430e15fca-b9855b02-backup.hg (glob)
+ ### Backup markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 2 changesets with 2 changes to 2 files
+ # unbundling: 1 new obsolescence markers
+ # unbundling: (run 'hg update' to get a working copy)
--- a/tests/test-obsolete-changeset-exchange.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-obsolete-changeset-exchange.t Tue Jun 20 16:33:46 2017 -0400
@@ -83,6 +83,23 @@
adding file changes
added 1 changesets with 0 changes to 1 files (+1 heads)
(run 'hg heads' to see heads)
+
+check-that bundle can contain markers:
+
+ $ hg bundle --hidden --rev f89bcc95eba5 --base "f89bcc95eba5^" ../f89bcc95eba5-obs.hg --config experimental.evolution.bundle-obsmarker=1
+ 1 changesets found
+ $ hg debugbundle ../f89bcc95eba5.hg
+ Stream params: sortdict([('Compression', 'BZ')])
+ changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
+ f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
+ $ hg debugbundle ../f89bcc95eba5-obs.hg
+ Stream params: sortdict([('Compression', 'BZ')])
+ changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
+ f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
+ obsmarkers -- 'sortdict()'
+ version: 1 (70 bytes)
+ 9d73aac1b2ed7d53835eaeec212ed41ea47da53a f89bcc95eba5174b1ccc3e33a82e84c96e8338ee 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
$ cd ..
pull does not fetch excessive changesets when common node is hidden (issue4982)
@@ -126,7 +143,7 @@
searching for changes
taking quick initial sample
query 2; still undecided: 2, sample size is: 2
- 2 total queries
+ 2 total queries in *.????s (glob)
1 changesets found
list of changesets:
bec0734cd68e84477ba7fc1d13e6cff53ab70129
@@ -144,11 +161,11 @@
adding file changes
adding foo revisions
added 1 changesets with 1 changes to 1 files (+1 heads)
- updating the branch cache
bundle2-input-part: total payload size 476
bundle2-input-part: "listkeys" (params: 1 mandatory) supported
bundle2-input-part: total payload size 58
bundle2-input-part: "listkeys" (params: 1 mandatory) supported
bundle2-input-bundle: 2 parts total
checking for updated bookmarks
+ updating the branch cache
(run 'hg heads' to see heads, 'hg merge' to merge)
--- a/tests/test-obsolete.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-obsolete.t Tue Jun 20 16:33:46 2017 -0400
@@ -267,6 +267,42 @@
o 0:1f0dee641bb7 (public) [ ] add a
+Basic exclusive testing
+
+ $ hg log -G --hidden
+ @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
+ |
+ | x 5:5601fb93a350 (draft *obsolete*) [ ] add new_3_c
+ |/
+ | x 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c
+ |/
+ | x 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c
+ |/
+ | o 2:245bde4270cd (public) [ ] add original_c
+ |/
+ o 1:7c3bad9141dc (public) [ ] add b
+ |
+ o 0:1f0dee641bb7 (public) [ ] add a
+
+ $ hg debugobsolete --rev 6f9641995072
+ 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
+ 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
+ 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
+ ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
+ cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
+ $ hg debugobsolete --rev 6f9641995072 --exclusive
+ 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
+ $ hg debugobsolete --rev 5601fb93a350 --hidden
+ 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
+ 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
+ ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
+ cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
+ $ hg debugobsolete --rev 5601fb93a350 --hidden --exclusive
+ $ hg debugobsolete --rev 5601fb93a350+6f9641995072 --hidden --exclusive
+ 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
+ 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
+ ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
+
$ cd ..
Revision 0 is hidden
@@ -563,7 +599,8 @@
$ hg up -q 'desc(n3w_3_c)'
$ mkcommit obsolete_e
created new head
- $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'`
+ $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'` \
+ > -u 'test <test@example.net>'
$ hg outgoing ../tmpf # parasite hg outgoing testin
comparing with ../tmpf
searching for changes
@@ -611,7 +648,7 @@
ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
+ cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test <test@example.net>'} (glob)
List of changesets with no chain
@@ -620,7 +657,7 @@
List of changesets that are included on marker chain
$ hg debugobsolete --hidden --rev 6
- cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
+ cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test <test@example.net>'} (glob)
List of changesets with a longer chain, (including a pruned children)
@@ -642,7 +679,7 @@
5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
- cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
+ cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test <test@example.net>'} (glob)
cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
List of all markers in JSON
@@ -702,7 +739,7 @@
{
"date": *, (glob)
"flag": 0,
- "metadata": {"user": "test"},
+ "metadata": {"user": "test <test@example.net>"},
"precnode": "cda648ca50f50482b7055c0b0c4c117bba6733d9",
"succnodes": ["3de5eca88c00aa039da7399a220f4a5221faa585"]
}
@@ -713,11 +750,11 @@
$ hg debugobsolete -r6 -T '{succnodes % "{node|short}"} {date|shortdate}\n'
3de5eca88c00 ????-??-?? (glob)
$ hg debugobsolete -r6 -T '{join(metadata % "{key}={value}", " ")}\n'
- user=test
+ user=test <test@example.net>
$ hg debugobsolete -r6 -T '{metadata}\n'
- 'user': 'test'
+ 'user': 'test <test@example.net>'
$ hg debugobsolete -r6 -T '{flag} {get(metadata, "user")}\n'
- 0 test
+ 0 test <test@example.net>
Test the debug output for exchange
----------------------------------
@@ -1065,11 +1102,11 @@
Test heads computation on pending index changes with obsolescence markers
$ cd ..
$ cat >$TESTTMP/test_extension.py << EOF
- > from mercurial import cmdutil
+ > from mercurial import cmdutil, registrar
> from mercurial.i18n import _
>
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> @command("amendtransient",[], _('hg amendtransient [rev]'))
> def amend(ui, repo, *pats, **opts):
> def commitfunc(ui, repo, message, match, opts):
@@ -1093,25 +1130,6 @@
$ hg amendtransient
[1, 3]
-Check that corrupted hidden cache does not crash
-
- $ printf "" > .hg/cache/hidden
- $ hg log -r . -T '{node}' --debug
- corrupted hidden cache
- 8fd96dfc63e51ed5a8af1bec18eb4b19dbf83812 (no-eol)
- $ hg log -r . -T '{node}' --debug
- 8fd96dfc63e51ed5a8af1bec18eb4b19dbf83812 (no-eol)
-
-#if unix-permissions
-Check that wrong hidden cache permission does not crash
-
- $ chmod 000 .hg/cache/hidden
- $ hg log -r . -T '{node}' --debug
- cannot read hidden cache
- error writing hidden changesets cache
- 8fd96dfc63e51ed5a8af1bec18eb4b19dbf83812 (no-eol)
-#endif
-
Test cache consistency for the visible filter
1) We want to make sure that the cached filtered revs are invalidated when
bookmarks change
@@ -1194,24 +1212,88 @@
o 0:a78f55e5508c (draft) [ ] 0
+ $ hg strip --hidden -r 2 --config extensions.strip= --config devel.strip-obsmarkers=no
+ saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e008cf283490-39c978dc-backup.hg (glob)
+ $ hg debugobsolete
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (*) {'user': 'test'} (glob)
+ f27abbcc1f77fb409cf9160482fe619541e2d605 0 {e008cf2834908e5d6b0f792a9d4b0e2272260fb8} (*) {'user': 'test'} (glob)
+ $ hg log -G
+ @ 2:b0551702f918 (draft) [tip ] 2
+ |
+ o 1:e016b03fd86f (draft) [ ] 1
+ |
+ o 0:a78f55e5508c (draft) [ ] 0
+
+ $ hg log -G --hidden
+ @ 2:b0551702f918 (draft) [tip ] 2
+ |
+ o 1:e016b03fd86f (draft) [ ] 1
+ |
+ o 0:a78f55e5508c (draft) [ ] 0
+
+ $ hg debugbundle .hg/strip-backup/e008cf283490-*-backup.hg
+ Stream params: sortdict([('Compression', 'BZ')])
+ changegroup -- "sortdict([('version', '02'), ('nbchanges', '2')])"
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8
+ f27abbcc1f77fb409cf9160482fe619541e2d605
+ obsmarkers -- 'sortdict()'
+ version: 1 (70 bytes)
+ f27abbcc1f77fb409cf9160482fe619541e2d605 0 {e008cf2834908e5d6b0f792a9d4b0e2272260fb8} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+ $ hg pull .hg/strip-backup/e008cf283490-*-backup.hg
+ pulling from .hg/strip-backup/e008cf283490-39c978dc-backup.hg
+ searching for changes
+ no changes found
+ $ hg debugobsolete
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (*) {'user': 'test'} (glob)
+ f27abbcc1f77fb409cf9160482fe619541e2d605 0 {e008cf2834908e5d6b0f792a9d4b0e2272260fb8} (*) {'user': 'test'} (glob)
+ $ hg log -G
+ @ 2:b0551702f918 (draft) [tip ] 2
+ |
+ o 1:e016b03fd86f (draft) [ ] 1
+ |
+ o 0:a78f55e5508c (draft) [ ] 0
+
+ $ hg log -G --hidden
+ @ 2:b0551702f918 (draft) [tip ] 2
+ |
+ o 1:e016b03fd86f (draft) [ ] 1
+ |
+ o 0:a78f55e5508c (draft) [ ] 0
+
+
+Testing that strip remove markers:
+
$ hg strip -r 1 --config extensions.strip=
0 files updated, 0 files merged, 2 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e016b03fd86f-c41c6bcc-backup.hg (glob)
+ saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e016b03fd86f-65ede734-backup.hg (glob)
+ $ hg debugobsolete
$ hg log -G
@ 0:a78f55e5508c (draft) [tip ] 0
$ hg log -G --hidden
@ 0:a78f55e5508c (draft) [tip ] 0
+ $ hg debugbundle .hg/strip-backup/e016b03fd86f-*-backup.hg
+ Stream params: sortdict([('Compression', 'BZ')])
+ changegroup -- "sortdict([('version', '02'), ('nbchanges', '2')])"
+ e016b03fd86fcccc54817d120b90b751aaf367d6
+ b0551702f918510f01ae838ab03a463054c67b46
+ obsmarkers -- 'sortdict()'
+ version: 1 (139 bytes)
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ f27abbcc1f77fb409cf9160482fe619541e2d605 0 {e008cf2834908e5d6b0f792a9d4b0e2272260fb8} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- $ hg pull .hg/strip-backup/*
- pulling from .hg/strip-backup/e016b03fd86f-c41c6bcc-backup.hg
- searching for changes
+ $ hg unbundle .hg/strip-backup/e016b03fd86f-*-backup.hg
adding changesets
adding manifests
adding file changes
added 2 changesets with 2 changes to 2 files
+ 2 new obsolescence markers
(run 'hg update' to get a working copy)
+ $ hg debugobsolete | sort
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (*) {'user': 'test'} (glob)
+ f27abbcc1f77fb409cf9160482fe619541e2d605 0 {e008cf2834908e5d6b0f792a9d4b0e2272260fb8} (*) {'user': 'test'} (glob)
$ hg log -G
o 2:b0551702f918 (draft) [tip ] 2
|
@@ -1245,14 +1327,14 @@
$ echo d > d
$ hg ci -Am d
adding d
- $ hg ci --amend -m dd
+ $ hg ci --amend -m dd --config experimental.evolution.track-operation=1
$ hg debugobsolete --index --rev "3+7"
1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 \(.*\) {'user': 'test'} (re)
- 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 \(.*\) {'user': 'test'} (re)
+ 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 \(.*\) {'operation': 'amend', 'user': 'test'} (re)
$ hg debugobsolete --index --rev "3+7" -Tjson
[
{
- "date": *, (glob)
+ "date": [0.0, 0],
"flag": 0,
"index": 1,
"metadata": {"user": "test"},
@@ -1260,10 +1342,10 @@
"succnodes": ["d27fb9b066076fd921277a4b9e8b9cb48c95bc6a"]
},
{
- "date": *, (glob)
+ "date": [0.0, 0],
"flag": 0,
"index": 3,
- "metadata": {"user": "test"},
+ "metadata": {"operation": "amend", "user": "test"},
"precnode": "4715cf767440ed891755448016c2b8cf70760c30",
"succnodes": ["7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d"]
}
@@ -1271,14 +1353,45 @@
Test the --delete option of debugobsolete command
$ hg debugobsolete --index
- 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 \(.*\) {'user': 'test'} (re)
- 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 \(.*\) {'user': 'test'} (re)
- 2 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 \(.*\) {'user': 'test'} (re)
- 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 \(.*\) {'user': 'test'} (re)
+ 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 2 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
$ hg debugobsolete --delete 1 --delete 3
deleted 2 obsolescence markers
$ hg debugobsolete
- cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 \(.*\) {'user': 'test'} (re)
- 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 \(.*\) {'user': 'test'} (re)
+ cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+Test adding changeset after obsmarkers affecting it
+(eg: during pull, or unbundle)
+
+ $ mkcommit e
+ $ hg bundle -r . --base .~1 ../bundle-2.hg
+ 1 changesets found
+ $ getid .
+ $ hg --config extensions.strip= strip -r .
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ saved backup bundle to $TESTTMP/tmpe/issue4845/doindexrev/.hg/strip-backup/9bc153528424-ee80edd4-backup.hg (glob)
+ $ hg debugobsolete 9bc153528424ea266d13e57f9ff0d799dfe61e4b
+ $ hg unbundle ../bundle-2.hg
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ (run 'hg update' to get a working copy)
+ $ hg log -G
+ @ 7:7ae79c5d60f0 (draft) [tip ] dd
+ |
+ | o 6:4715cf767440 (draft) [ ] d
+ |/
+ o 5:29346082e4a9 (draft) [ ] cc
+ |
+ o 3:d27fb9b06607 (draft) [ ] bb
+ |
+ | o 2:6fdef60fcbab (draft) [ ] b
+ |/
+ o 1:f9bd49731b0b (draft) [ ] aa
+
+
$ cd ..
-
--- a/tests/test-oldcgi.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-oldcgi.t Tue Jun 20 16:33:46 2017 -0400
@@ -4,7 +4,7 @@
$ hg init test
$ cat >hgweb.cgi <<HGWEB
- > #!/usr/bin/env python
+ > #!$PYTHON
> #
> # An example CGI script to use hgweb, edit as necessary
>
@@ -26,7 +26,7 @@
> HGWEBDIRCONF
$ cat >hgwebdir.cgi <<HGWEBDIR
- > #!/usr/bin/env python
+ > #!$PYTHON
> #
> # An example CGI script to export multiple hgweb repos, edit as necessary
>
--- a/tests/test-pager-legacy.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-pager-legacy.t Tue Jun 20 16:33:46 2017 -0400
@@ -14,7 +14,7 @@
> [extensions]
> pager=
> [pager]
- > pager = python $TESTTMP/fakepager.py
+ > pager = $PYTHON $TESTTMP/fakepager.py
> EOF
$ hg init repo
@@ -22,7 +22,7 @@
$ echo a >> a
$ hg add a
$ hg ci -m 'add a'
- $ for x in `python $TESTDIR/seq.py 1 10`; do
+ $ for x in `$PYTHON $TESTDIR/seq.py 1 10`; do
> echo a $x >> a
> hg ci -m "modify a $x"
> done
@@ -214,9 +214,9 @@
Pager should not override the exit code of other commands
$ cat >> $TESTTMP/fortytwo.py <<'EOF'
- > from mercurial import cmdutil, commands
+ > from mercurial import registrar, commands
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> @command('fortytwo', [], 'fortytwo', norepo=True)
> def fortytwo(ui, *opts):
> ui.write('42\n')
--- a/tests/test-pager.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-pager.t Tue Jun 20 16:33:46 2017 -0400
@@ -1,7 +1,11 @@
$ cat >> fakepager.py <<EOF
> import sys
+ > printed = False
> for line in sys.stdin:
> sys.stdout.write('paged! %r\n' % line)
+ > printed = True
+ > if not printed:
+ > sys.stdout.write('paged empty output!\n')
> EOF
Enable ui.formatted because pager won't fire without it, and set up
@@ -12,7 +16,7 @@
> formatted = yes
> color = no
> [pager]
- > pager = python $TESTTMP/fakepager.py
+ > pager = $PYTHON $TESTTMP/fakepager.py
> EOF
$ hg init repo
@@ -20,7 +24,7 @@
$ echo a >> a
$ hg add a
$ hg ci -m 'add a'
- $ for x in `python $TESTDIR/seq.py 1 10`; do
+ $ for x in `$PYTHON $TESTDIR/seq.py 1 10`; do
> echo a $x >> a
> hg ci -m "modify a $x"
> done
@@ -223,9 +227,9 @@
Pager should not override the exit code of other commands
$ cat >> $TESTTMP/fortytwo.py <<'EOF'
- > from mercurial import cmdutil, commands
+ > from mercurial import commands, registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> @command('fortytwo', [], 'fortytwo', norepo=True)
> def fortytwo(ui, *opts):
> ui.write('42\n')
@@ -281,6 +285,15 @@
9: a 9
10: a 10
+A command with --output option:
+
+ $ hg cat -r0 a
+ paged! 'a\n'
+ $ hg cat -r0 a --output=-
+ paged! 'a\n'
+ $ hg cat -r0 a --output=out
+ $ rm out
+
Put annotate in the ignore list for pager:
$ cat >> $HGRCPATH <<EOF
> [pager]
--- a/tests/test-parse-date.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-parse-date.t Tue Jun 20 16:33:46 2017 -0400
@@ -17,13 +17,13 @@
$ hg ci -d "1150000000 14400" -m "rev 4 (merge)"
$ echo "fail" >> a
$ hg ci -d "should fail" -m "fail"
- abort: invalid date: 'should fail'
+ hg: parse error: invalid date: 'should fail'
[255]
$ hg ci -d "100000000000000000 1400" -m "fail"
- abort: date exceeds 32 bits: 100000000000000000
+ hg: parse error: date exceeds 32 bits: 100000000000000000
[255]
$ hg ci -d "100000 1400000" -m "fail"
- abort: impossible time zone offset: 1400000
+ hg: parse error: impossible time zone offset: 1400000
[255]
Check with local timezone other than GMT and with DST
--- a/tests/test-parseindex.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-parseindex.t Tue Jun 20 16:33:46 2017 -0400
@@ -53,7 +53,7 @@
> for r in cl:
> print short(cl.node(r))
> EOF
- $ python test.py
+ $ $PYTHON test.py
2 revisions:
7c31755bf9b5
26333235a41c
@@ -167,13 +167,13 @@
> print inst
> EOF
- $ python test.py limit/.hg/store
+ $ $PYTHON test.py limit/.hg/store
reachableroots: parent out of range
compute_phases_map_sets: parent out of range
index_headrevs: parent out of range
find_gca_candidates: parent out of range
find_deepest: parent out of range
- $ python test.py segv/.hg/store
+ $ $PYTHON test.py segv/.hg/store
reachableroots: parent out of range
compute_phases_map_sets: parent out of range
index_headrevs: parent out of range
--- a/tests/test-parseindex2.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-parseindex2.py Tue Jun 20 16:33:46 2017 -0400
@@ -14,9 +14,11 @@
nullrev,
)
from mercurial import (
- parsers,
+ policy,
)
+parsers = policy.importmod(r'parsers')
+
# original python implementation
def gettype(q):
return int(q & 0xFFFF)
@@ -114,7 +116,7 @@
# of the currently-running Python interpreter, so we monkey-patch
# sys.hexversion to simulate using different versions.
code = ("import sys; sys.hexversion=%s; "
- "import mercurial.parsers" % hexversion)
+ "import mercurial.cext.parsers" % hexversion)
cmd = "python -c \"%s\"" % code
# We need to do these tests inside a subprocess because parser.c's
# version-checking code happens inside the module init function, and
--- a/tests/test-patch-offset.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-patch-offset.t Tue Jun 20 16:33:46 2017 -0400
@@ -23,7 +23,7 @@
within this file. If the offset isn't tracked then the hunks can be
applied to the wrong lines of this file.
- $ python ../writepatterns.py a 34X 10A 1B 10A 1C 10A 1B 10A 1D 10A 1B 10A 1E 10A 1B 10A
+ $ $PYTHON ../writepatterns.py a 34X 10A 1B 10A 1C 10A 1B 10A 1D 10A 1B 10A 1E 10A 1B 10A
$ hg commit -Am adda
adding a
@@ -76,7 +76,7 @@
compare imported changes against reference file
- $ python ../writepatterns.py aref 34X 10A 1B 1a 9A 1C 10A 1B 10A 1D 10A 1B 1a 9A 1E 10A 1B 1a 9A
+ $ $PYTHON ../writepatterns.py aref 34X 10A 1B 1a 9A 1C 10A 1B 10A 1D 10A 1B 1a 9A 1E 10A 1B 1a 9A
$ diff aref a
$ cd ..
--- a/tests/test-patch.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-patch.t Tue Jun 20 16:33:46 2017 -0400
@@ -6,7 +6,7 @@
> EOF
$ echo "[ui]" >> $HGRCPATH
- $ echo "patch=python ../patchtool.py" >> $HGRCPATH
+ $ echo "patch=$PYTHON ../patchtool.py" >> $HGRCPATH
$ hg init a
$ cd a
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-patchbomb-bookmark.t Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,168 @@
+Create @ bookmark as main reference
+
+ $ hg init repo
+ $ cd repo
+ $ echo "[extensions]" >> $HGRCPATH
+ $ echo "patchbomb=" >> $HGRCPATH
+ $ hg book @
+
+Create a dummy revision that must never be exported
+
+ $ echo no > no
+ $ hg ci -Amno -d '6 0'
+ adding no
+
+Create a feature and use -B
+
+ $ hg book booktest
+ $ echo first > a
+ $ hg ci -Amfirst -d '7 0'
+ adding a
+ $ echo second > b
+ $ hg ci -Amsecond -d '8 0'
+ adding b
+ $ hg email --date '1981-1-1 0:1' -n -t foo -s bookmark -B booktest
+ From [test]: test
+ this patch series consists of 2 patches.
+
+
+ Write the introductory message for the patch series.
+
+ Cc:
+
+ displaying [PATCH 0 of 2] bookmark ...
+ Content-Type: text/plain; charset="us-ascii"
+ MIME-Version: 1.0
+ Content-Transfer-Encoding: 7bit
+ Subject: [PATCH 0 of 2] bookmark
+ Message-Id: <patchbomb.347155260@*> (glob)
+ User-Agent: Mercurial-patchbomb/* (glob)
+ Date: Thu, 01 Jan 1981 00:01:00 +0000
+ From: test
+ To: foo
+
+
+ displaying [PATCH 1 of 2] first ...
+ Content-Type: text/plain; charset="us-ascii"
+ MIME-Version: 1.0
+ Content-Transfer-Encoding: 7bit
+ Subject: [PATCH 1 of 2] first
+ X-Mercurial-Node: accde9b8b6dce861c185d0825c1affc09a79cb26
+ X-Mercurial-Series-Index: 1
+ X-Mercurial-Series-Total: 2
+ Message-Id: <accde9b8b6dce861c185.347155261@*> (glob)
+ X-Mercurial-Series-Id: <accde9b8b6dce861c185.347155261@*> (glob)
+ In-Reply-To: <patchbomb.347155260@*> (glob)
+ References: <patchbomb.347155260@*> (glob)
+ User-Agent: Mercurial-patchbomb/* (glob)
+ Date: Thu, 01 Jan 1981 00:01:01 +0000
+ From: test
+ To: foo
+
+ # HG changeset patch
+ # User test
+ # Date 7 0
+ # Thu Jan 01 00:00:07 1970 +0000
+ # Node ID accde9b8b6dce861c185d0825c1affc09a79cb26
+ # Parent 043bd3889e5aaf7d88fe3713cf425f782ad2fb71
+ first
+
+ diff -r 043bd3889e5a -r accde9b8b6dc a
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/a Thu Jan 01 00:00:07 1970 +0000
+ @@ -0,0 +1,1 @@
+ +first
+
+ displaying [PATCH 2 of 2] second ...
+ Content-Type: text/plain; charset="us-ascii"
+ MIME-Version: 1.0
+ Content-Transfer-Encoding: 7bit
+ Subject: [PATCH 2 of 2] second
+ X-Mercurial-Node: 417defd1559c396ba06a44dce8dc1c2d2d653f3f
+ X-Mercurial-Series-Index: 2
+ X-Mercurial-Series-Total: 2
+ Message-Id: <417defd1559c396ba06a.347155262@*> (glob)
+ X-Mercurial-Series-Id: <accde9b8b6dce861c185.347155261@*> (glob)
+ In-Reply-To: <patchbomb.347155260@*> (glob)
+ References: <patchbomb.347155260@*> (glob)
+ User-Agent: Mercurial-patchbomb/* (glob)
+ Date: Thu, 01 Jan 1981 00:01:02 +0000
+ From: test
+ To: foo
+
+ # HG changeset patch
+ # User test
+ # Date 8 0
+ # Thu Jan 01 00:00:08 1970 +0000
+ # Node ID 417defd1559c396ba06a44dce8dc1c2d2d653f3f
+ # Parent accde9b8b6dce861c185d0825c1affc09a79cb26
+ second
+
+ diff -r accde9b8b6dc -r 417defd1559c b
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/b Thu Jan 01 00:00:08 1970 +0000
+ @@ -0,0 +1,1 @@
+ +second
+
+Do the same and combine with -o only one must be exported
+
+ $ cd ..
+ $ hg clone repo repo2
+ updating to bookmark @
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cd repo
+ $ hg up @
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ (activating bookmark @)
+ $ hg book outgoing
+ $ echo 1 > x
+ $ hg ci -Am1 -d '8 0'
+ adding x
+ created new head
+ $ hg push ../repo2 -B outgoing
+ pushing to ../repo2
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ exporting bookmark outgoing
+ $ echo 2 > y
+ $ hg ci -Am2 -d '9 0'
+ adding y
+ $ hg email --date '1982-1-1 0:1' -n -t foo -s bookmark -B outgoing -o ../repo2
+ comparing with ../repo2
+ From [test]: test
+ this patch series consists of 1 patches.
+
+ Cc:
+
+ displaying [PATCH] bookmark ...
+ Content-Type: text/plain; charset="us-ascii"
+ MIME-Version: 1.0
+ Content-Transfer-Encoding: 7bit
+ Subject: [PATCH] bookmark
+ X-Mercurial-Node: 8dab2639fd35f1e337ad866c372a5c44f1064e3c
+ X-Mercurial-Series-Index: 1
+ X-Mercurial-Series-Total: 1
+ Message-Id: <8dab2639fd35f1e337ad.378691260@*> (glob)
+ X-Mercurial-Series-Id: <8dab2639fd35f1e337ad.378691260@*> (glob)
+ User-Agent: Mercurial-patchbomb/* (glob)
+ Date: Fri, 01 Jan 1982 00:01:00 +0000
+ From: test
+ To: foo
+
+ # HG changeset patch
+ # User test
+ # Date 9 0
+ # Thu Jan 01 00:00:09 1970 +0000
+ # Node ID 8dab2639fd35f1e337ad866c372a5c44f1064e3c
+ # Parent 0b24b8316483bf30bfc3e4d4168e922b169dbe66
+ 2
+
+ diff -r 0b24b8316483 -r 8dab2639fd35 y
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/y Thu Jan 01 00:00:09 1970 +0000
+ @@ -0,0 +1,1 @@
+ +2
+
--- a/tests/test-patchbomb-tls.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-patchbomb-tls.t Tue Jun 20 16:33:46 2017 -0400
@@ -5,7 +5,7 @@
$ CERTSDIR="$TESTDIR/sslcerts"
$ cat "$CERTSDIR/priv.pem" "$CERTSDIR/pub.pem" >> server.pem
- $ python "$TESTDIR/dummysmtpd.py" -p $HGPORT --pid-file a.pid -d \
+ $ $PYTHON "$TESTDIR/dummysmtpd.py" -p $HGPORT --pid-file a.pid -d \
> --tls smtps --certificate `pwd`/server.pem
listening at localhost:$HGPORT (?)
$ cat a.pid >> $DAEMON_PIDS
--- a/tests/test-patchbomb.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-patchbomb.t Tue Jun 20 16:33:46 2017 -0400
@@ -22,7 +22,7 @@
> skipblank = False
> print l,
> EOF
- $ FILTERBOUNDARY="python `pwd`/prune-blank-after-boundary.py"
+ $ FILTERBOUNDARY="$PYTHON `pwd`/prune-blank-after-boundary.py"
$ echo "[format]" >> $HGRCPATH
$ echo "usegeneraldelta=yes" >> $HGRCPATH
$ echo "[extensions]" >> $HGRCPATH
--- a/tests/test-phases.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-phases.t Tue Jun 20 16:33:46 2017 -0400
@@ -181,6 +181,7 @@
check that branch cache with "served" filter are properly computed and stored
$ ls ../push-dest/.hg/cache/branch2*
+ ../push-dest/.hg/cache/branch2-base
../push-dest/.hg/cache/branch2-served
$ cat ../push-dest/.hg/cache/branch2-served
6d6770faffce199f1fddd1cf87f6f026138cf061 6 465891ffab3c47a3c23792f7dc84156e19a90722
@@ -191,6 +192,7 @@
5:2713879da13d6eea1ff22b442a5a87cb31a7ce6a secret
3:b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e draft
$ ls ../push-dest/.hg/cache/branch2*
+ ../push-dest/.hg/cache/branch2-base
../push-dest/.hg/cache/branch2-served
../push-dest/.hg/cache/branch2-visible
$ cat ../push-dest/.hg/cache/branch2-served
--- a/tests/test-profile.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-profile.t Tue Jun 20 16:33:46 2017 -0400
@@ -4,10 +4,24 @@
$ hg init a
$ cd a
-#if lsprof
test --profile
+ $ hg st --profile 2>&1 | grep Sample
+ Sample count: \d+ (re)
+
+Abbreviated version
+
+ $ hg st --prof 2>&1 | grep Sample
+ Sample count: \d+ (re)
+
+In alias
+
+ $ hg --config "alias.profst=status --profile" profst 2>&1 | grep Sample
+ Sample count: \d+ (re)
+
+#if lsprof
+
$ prof='hg --config profiling.type=ls --profile'
$ $prof st 2>../out
@@ -51,9 +65,9 @@
$ cat >> sleepext.py << EOF
> import time
- > from mercurial import cmdutil, commands
+ > from mercurial import registrar, commands
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> @command('sleep', [], 'hg sleep')
> def sleep(ui, *args, **kwargs):
> time.sleep(0.1)
@@ -99,3 +113,51 @@
[1]
$ cd ..
+
+profiler extension could be loaded before other extensions
+
+ $ cat > fooprof.py <<EOF
+ > from __future__ import absolute_import
+ > import contextlib
+ > @contextlib.contextmanager
+ > def profile(ui, fp):
+ > print('fooprof: start profile')
+ > yield
+ > print('fooprof: end profile')
+ > def extsetup(ui):
+ > ui.write('fooprof: loaded\n')
+ > EOF
+
+ $ cat > otherextension.py <<EOF
+ > from __future__ import absolute_import
+ > def extsetup(ui):
+ > ui.write('otherextension: loaded\n')
+ > EOF
+
+ $ hg init b
+ $ cd b
+ $ cat >> .hg/hgrc <<EOF
+ > [extensions]
+ > other = $TESTTMP/otherextension.py
+ > fooprof = $TESTTMP/fooprof.py
+ > EOF
+
+ $ hg root
+ otherextension: loaded
+ fooprof: loaded
+ $TESTTMP/b (glob)
+ $ HGPROF=fooprof hg root --profile
+ fooprof: loaded
+ fooprof: start profile
+ otherextension: loaded
+ $TESTTMP/b (glob)
+ fooprof: end profile
+
+ $ HGPROF=other hg root --profile 2>&1 | head -n 2
+ otherextension: loaded
+ unrecognized profiler 'other' - ignored
+
+ $ HGPROF=unknown hg root --profile 2>&1 | head -n 1
+ unrecognized profiler 'unknown' - ignored
+
+ $ cd ..
--- a/tests/test-progress.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-progress.t Tue Jun 20 16:33:46 2017 -0400
@@ -1,10 +1,10 @@
$ cat > loop.py <<EOF
- > from mercurial import cmdutil, commands
+ > from mercurial import commands, registrar
> import time
>
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
>
> class incrementingtime(object):
> def __init__(self):
--- a/tests/test-pull-branch.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-pull-branch.t Tue Jun 20 16:33:46 2017 -0400
@@ -133,6 +133,7 @@
adding file changes
added 4 changesets with 4 changes to 1 files (+1 heads)
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "d740e1a584e7: a5.2"
1 other heads for branch "branchA"
Make changes on new branch on tt
--- a/tests/test-pull-update.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-pull-update.t Tue Jun 20 16:33:46 2017 -0400
@@ -41,6 +41,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "800c91d5bfc1: m"
1 other heads for branch "default"
$ cd ../tt
@@ -55,6 +56,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "107cefe13e42: m"
1 other heads for branch "default"
$ HGMERGE=true hg merge
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-push-checkheads-partial-C1.t Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,82 @@
+====================================
+Testing head checking code: Case C-1
+====================================
+
+Mercurial checks for the introduction of new heads on push. Evolution comes
+into play to detect if existing branches on the server are being replaced by
+some of the new one we push.
+
+This case is part of a series of tests checking this behavior.
+
+Category C: checking cases where the branch is only partially obsoleted.
+TestCase 1: 2 changeset branch, only the head is rewritten
+
+.. old-state:
+..
+.. * 2 changeset branch
+..
+.. new-state:
+..
+.. * 1 new changeset branch superseding only the head of the old one
+.. * base of the old branch is still alive
+..
+.. expected-result:
+..
+.. * push denied
+..
+.. graph-summary:
+..
+.. B ø⇠◔ B'
+.. | |
+.. A ○ |
+.. |/
+.. ○
+
+ $ . $TESTDIR/testlib/push-checkheads-util.sh
+
+Test setup
+----------
+
+ $ mkdir C1
+ $ cd C1
+ $ setuprepos
+ creating basic server and client repo
+ updating to branch default
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cd server
+ $ mkcommit B0
+ $ cd ../client
+ $ hg pull
+ pulling from $TESTTMP/C1/server (glob)
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ (run 'hg update' to get a working copy)
+ $ hg up 0
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ mkcommit B1
+ created new head
+ $ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
+ $ hg log -G --hidden
+ @ 25c56d33e4c4 (draft): B1
+ |
+ | x d73caddc5533 (draft): B0
+ | |
+ | o 8aaa48160adc (draft): A0
+ |/
+ o 1e4be0697311 (public): root
+
+
+Actual testing
+--------------
+
+ $ hg push
+ pushing to $TESTTMP/C1/server (glob)
+ searching for changes
+ abort: push creates new remote head 25c56d33e4c4!
+ (merge or see 'hg help push' for details about pushing new heads)
+ [255]
+
+ $ cd ../..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-push-race.t Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,1839 @@
+============================================================================================
+Test cases where there are race condition between two clients pushing to the same repository
+============================================================================================
+
+This file tests cases where two clients push to a server at the same time. The
+"raced" client is done preparing its push bundle when the "racing" client
+performs its push. The "raced" client starts its actual push after the "racing"
+client's push is fully complete.
+
+A set of extensions and shell functions ensures this scheduling.
+
+ $ cat >> delaypush.py << EOF
+ > """small extension orchestrate push race
+ >
+ > Client with the extensions will create a file when ready and get stuck until
+ > a file is created."""
+ >
+ > import atexit
+ > import errno
+ > import os
+ > import time
+ >
+ > from mercurial import (
+ > exchange,
+ > extensions,
+ > )
+ >
+ > def delaypush(orig, pushop):
+ > # notify we are done preparing
+ > readypath = pushop.repo.ui.config('delaypush', 'ready-path', None)
+ > if readypath is not None:
+ > with open(readypath, 'w') as r:
+ > r.write('foo')
+ > pushop.repo.ui.status('wrote ready: %s\n' % readypath)
+ > # now wait for the other process to be done
+ > watchpath = pushop.repo.ui.config('delaypush', 'release-path', None)
+ > if watchpath is not None:
+ > pushop.repo.ui.status('waiting on: %s\n' % watchpath)
+ > limit = 100
+ > while 0 < limit and not os.path.exists(watchpath):
+ > limit -= 1
+ > time.sleep(0.1)
+ > if limit <= 0:
+ >             pushop.repo.ui.warn('exiting without watchfile: %s' % watchpath)
+ > else:
+ > # delete the file at the end of the push
+ > def delete():
+ > try:
+ > os.unlink(watchpath)
+ > except OSError as exc:
+ > if exc.errno != errno.ENOENT:
+ > raise
+ > atexit.register(delete)
+ > return orig(pushop)
+ >
+ > def uisetup(ui):
+ > extensions.wrapfunction(exchange, '_pushbundle2', delaypush)
+ > EOF
+
+ $ waiton () {
+ > # wait for a file to be created (then delete it)
+ > count=100
+ > while [ ! -f $1 ] ;
+ > do
+ > sleep 0.1;
+ > count=`expr $count - 1`;
+ > if [ $count -lt 0 ];
+ > then
+ > break
+ > fi;
+ > done
+ > [ -f $1 ] || echo "ready file still missing: $1"
+ > rm -f $1
+ > }
+
+ $ release () {
+ > # create a file and wait for it be deleted
+ > count=100
+ > touch $1
+ > while [ -f $1 ] ;
+ > do
+ > sleep 0.1;
+ > count=`expr $count - 1`;
+ > if [ $count -lt 0 ];
+ > then
+ > break
+ > fi;
+ > done
+ > [ ! -f $1 ] || echo "delay file still exist: $1"
+ > }
+
+ $ cat >> $HGRCPATH << EOF
+ > [ui]
+ > ssh = python "$TESTDIR/dummyssh"
+ > # simplify output
+ > logtemplate = {node|short} {desc} ({branch})
+ > [phases]
+ > publish = no
+ > [experimental]
+ > evolution = all
+ > [alias]
+ > graph = log -G --rev 'sort(all(), "topo")'
+ > EOF
+
+We test multiple cases:
+* strict: no race detected,
+* unrelated: race on unrelated heads are allowed.
+
+#testcases strict unrelated
+
+#if unrelated
+
+ $ cat >> $HGRCPATH << EOF
+ > [server]
+ > concurrent-push-mode = check-related
+ > EOF
+
+#endif
+
+Setup
+-----
+
+create a repo with one root
+
+ $ hg init server
+ $ cd server
+ $ echo root > root
+ $ hg ci -Am "C-ROOT"
+ adding root
+ $ cd ..
+
+clone it in two clients
+
+ $ hg clone ssh://user@dummy/server client-racy
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ updating to branch default
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg clone ssh://user@dummy/server client-other
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ updating to branch default
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+setup one to allow race on push
+
+ $ cat >> client-racy/.hg/hgrc << EOF
+ > [extensions]
+ > delaypush = $TESTTMP/delaypush.py
+ > [delaypush]
+ > ready-path = $TESTTMP/readyfile
+ > release-path = $TESTTMP/watchfile
+ > EOF
+
+Simple race, both try to push to the server at the same time
+------------------------------------------------------------
+
+Both try to replace the same head
+
+# a
+# | b
+# |/
+# *
+
+Creating changesets
+
+ $ echo b > client-other/a
+ $ hg -R client-other/ add client-other/a
+ $ hg -R client-other/ commit -m "C-A"
+ $ echo b > client-racy/b
+ $ hg -R client-racy/ add client-racy/b
+ $ hg -R client-racy/ commit -m "C-B"
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -r 'tip'
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server graph
+ o 98217d5a1659 C-A (default)
+ |
+ @ 842e2fac6304 C-ROOT (default)
+
+
+Pushing on two different heads
+------------------------------
+
+Both try to replace a different head
+
+# a b
+# | |
+# * *
+# |/
+# *
+
+(resync-all)
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+
+ $ hg -R server graph
+ o a9149a1428e2 C-B (default)
+ |
+ | o 98217d5a1659 C-A (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets
+
+ $ echo aa >> client-other/a
+ $ hg -R client-other/ commit -m "C-C"
+ $ echo bb >> client-racy/b
+ $ hg -R client-racy/ commit -m "C-D"
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -r 'tip'
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+#if strict
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server graph
+ o 51c544a58128 C-C (default)
+ |
+ o 98217d5a1659 C-A (default)
+ |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+#endif
+#if unrelated
+
+(The two heads are unrelated, push should be allowed)
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+
+ $ hg -R server graph
+ o 59e76faf78bd C-D (default)
+ |
+ o a9149a1428e2 C-B (default)
+ |
+ | o 51c544a58128 C-C (default)
+ | |
+ | o 98217d5a1659 C-A (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+#endif
+
+Pushing while someone creates a new head
+-----------------------------------------
+
+Pushing a new changeset while someone creates a new branch.
+
+# a (raced)
+# |
+# * b
+# |/
+# *
+
+(resync-all)
+
+#if strict
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ (run 'hg update' to get a working copy)
+
+#endif
+#if unrelated
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ no changes found
+
+#endif
+
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ (run 'hg update' to get a working copy)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ (run 'hg update' to get a working copy)
+
+ $ hg -R server graph
+ o 59e76faf78bd C-D (default)
+ |
+ o a9149a1428e2 C-B (default)
+ |
+ | o 51c544a58128 C-C (default)
+ | |
+ | o 98217d5a1659 C-A (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets
+
+(new head)
+
+ $ hg -R client-other/ up 'desc("C-A")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo aaa >> client-other/a
+ $ hg -R client-other/ commit -m "C-E"
+ created new head
+
+(children of existing head)
+
+ $ hg -R client-racy/ up 'desc("C-C")'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ echo bbb >> client-racy/a
+ $ hg -R client-racy/ commit -m "C-F"
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -fr 'tip'
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files (+1 heads)
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+#if strict
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server graph
+ o d603e2c0cdd7 C-E (default)
+ |
+ | o 51c544a58128 C-C (default)
+ |/
+ o 98217d5a1659 C-A (default)
+ |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+#endif
+
+#if unrelated
+
+(The racing new head does not affect existing heads, push should go through)
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+
+ $ hg -R server graph
+ o d9e379a8c432 C-F (default)
+ |
+ o 51c544a58128 C-C (default)
+ |
+ | o d603e2c0cdd7 C-E (default)
+ |/
+ o 98217d5a1659 C-A (default)
+ |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+#endif
+
+Pushing touching different named branch (same topo): new branch raced
+---------------------------------------------------------------------
+
+Pushing two children on the same head, one is a different named branch
+
+# a (raced, branch-a)
+# |
+# | b (default branch)
+# |/
+# *
+
+(resync-all)
+
+#if strict
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ (run 'hg update' to get a working copy)
+
+#endif
+#if unrelated
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ no changes found
+
+#endif
+
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ (run 'hg update' to get a working copy)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+
+ $ hg -R server graph
+ o d9e379a8c432 C-F (default)
+ |
+ o 51c544a58128 C-C (default)
+ |
+ | o d603e2c0cdd7 C-E (default)
+ |/
+ o 98217d5a1659 C-A (default)
+ |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets
+
+(update existing head)
+
+ $ hg -R client-other/ up 'desc("C-F")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo aaa >> client-other/a
+ $ hg -R client-other/ commit -m "C-G"
+
+(new named branch from that existing head)
+
+ $ hg -R client-racy/ up 'desc("C-F")'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo bbb >> client-racy/a
+ $ hg -R client-racy/ branch my-first-test-branch
+ marked working directory as branch my-first-test-branch
+ (branches are permanent and global, did you want a bookmark?)
+ $ hg -R client-racy/ commit -m "C-H"
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' --new-branch > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -fr 'tip'
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+#if strict
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server graph
+ o 75d69cba5402 C-G (default)
+ |
+ o d9e379a8c432 C-F (default)
+ |
+ o 51c544a58128 C-C (default)
+ |
+ | o d603e2c0cdd7 C-E (default)
+ |/
+ o 98217d5a1659 C-A (default)
+ |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+#endif
+#if unrelated
+
+(unrelated named branches are unrelated)
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files (+1 heads)
+
+ $ hg -R server graph
+ o 833be552cfe6 C-H (my-first-test-branch)
+ |
+ | o 75d69cba5402 C-G (default)
+ |/
+ o d9e379a8c432 C-F (default)
+ |
+ o 51c544a58128 C-C (default)
+ |
+ | o d603e2c0cdd7 C-E (default)
+ |/
+ o 98217d5a1659 C-A (default)
+ |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+#endif
+
+The racing new head does not affect existing heads, push should go through
+
+pushing touching different named branch (same topo): old branch raced
+---------------------------------------------------------------------
+
+Pushing two children on the same head, one is a different named branch
+
+# a (raced, default-branch)
+# |
+# | b (new branch)
+# |/
+# * (default-branch)
+
+(resync-all)
+
+#if strict
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+
+#endif
+#if unrelated
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ no changes found
+
+#endif
+
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads' to see heads)
+
+ $ hg -R server graph
+ o 833be552cfe6 C-H (my-first-test-branch)
+ |
+ | o 75d69cba5402 C-G (default)
+ |/
+ o d9e379a8c432 C-F (default)
+ |
+ o 51c544a58128 C-C (default)
+ |
+ | o d603e2c0cdd7 C-E (default)
+ |/
+ o 98217d5a1659 C-A (default)
+ |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets
+
+(new named branch from one head)
+
+ $ hg -R client-other/ up 'desc("C-G")'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo aaa >> client-other/a
+ $ hg -R client-other/ branch my-second-test-branch
+ marked working directory as branch my-second-test-branch
+ $ hg -R client-other/ commit -m "C-I"
+
+(children "updating" that same head)
+
+ $ hg -R client-racy/ up 'desc("C-G")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo bbb >> client-racy/a
+ $ hg -R client-racy/ commit -m "C-J"
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -fr 'tip' --new-branch
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+#if strict
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server graph
+ o b35ed749f288 C-I (my-second-test-branch)
+ |
+ o 75d69cba5402 C-G (default)
+ |
+ | o 833be552cfe6 C-H (my-first-test-branch)
+ |/
+ o d9e379a8c432 C-F (default)
+ |
+ o 51c544a58128 C-C (default)
+ |
+ | o d603e2c0cdd7 C-E (default)
+ |/
+ o 98217d5a1659 C-A (default)
+ |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+#endif
+
+#if unrelated
+
+(unrelated named branches are unrelated)
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files (+1 heads)
+
+ $ hg -R server graph
+ o 89420bf00fae C-J (default)
+ |
+ | o b35ed749f288 C-I (my-second-test-branch)
+ |/
+ o 75d69cba5402 C-G (default)
+ |
+ | o 833be552cfe6 C-H (my-first-test-branch)
+ |/
+ o d9e379a8c432 C-F (default)
+ |
+ o 51c544a58128 C-C (default)
+ |
+ | o d603e2c0cdd7 C-E (default)
+ |/
+ o 98217d5a1659 C-A (default)
+ |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+#endif
+
+pushing racing push touch multiple heads
+----------------------------------------
+
+There are multiple heads, but the racing push touch all of them
+
+# a (raced)
+# | b
+# |/|
+# * *
+# |/
+# *
+
+(resync-all)
+
+#if strict
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+
+#endif
+
+#if unrelated
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ no changes found
+
+#endif
+
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads' to see heads)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+
+ $ hg -R server graph
+ o 89420bf00fae C-J (default)
+ |
+ | o b35ed749f288 C-I (my-second-test-branch)
+ |/
+ o 75d69cba5402 C-G (default)
+ |
+ | o 833be552cfe6 C-H (my-first-test-branch)
+ |/
+ o d9e379a8c432 C-F (default)
+ |
+ o 51c544a58128 C-C (default)
+ |
+ | o d603e2c0cdd7 C-E (default)
+ |/
+ o 98217d5a1659 C-A (default)
+ |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets
+
+(merges heads)
+
+ $ hg -R client-other/ up 'desc("C-E")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg -R client-other/ merge 'desc("C-D")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg -R client-other/ commit -m "C-K"
+
+(update one head)
+
+ $ hg -R client-racy/ up 'desc("C-D")'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ echo bbb >> client-racy/b
+ $ hg -R client-racy/ commit -m "C-L"
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -fr 'tip' --new-branch
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 0 changes to 0 files (-1 heads)
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server graph
+ o be705100c623 C-K (default)
+ |\
+ | o d603e2c0cdd7 C-E (default)
+ | |
+ o | 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ | | o 51c544a58128 C-C (default)
+ | |/
+ o | a9149a1428e2 C-B (default)
+ | |
+ | o 98217d5a1659 C-A (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+pushing raced push touch multiple heads
+---------------------------------------
+
+There are multiple heads, the raced push touch all of them
+
+# b
+# | a (raced)
+# |/|
+# * *
+# |/
+# *
+
+(resync-all)
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 0 changes to 0 files
+ (run 'hg update' to get a working copy)
+
+ $ hg -R server graph
+ o cac2cead0ff0 C-L (default)
+ |
+ | o be705100c623 C-K (default)
+ |/|
+ | o d603e2c0cdd7 C-E (default)
+ | |
+ o | 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ | | o 51c544a58128 C-C (default)
+ | |/
+ o | a9149a1428e2 C-B (default)
+ | |
+ | o 98217d5a1659 C-A (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets
+
+(update existing head)
+
+ $ echo aaa >> client-other/a
+ $ hg -R client-other/ commit -m "C-M"
+
+(merge heads)
+
+ $ hg -R client-racy/ merge 'desc("C-K")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg -R client-racy/ commit -m "C-N"
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -fr 'tip' --new-branch
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server graph
+ o 6fd3090135df C-M (default)
+ |
+ o be705100c623 C-K (default)
+ |\
+ | o d603e2c0cdd7 C-E (default)
+ | |
+ +---o cac2cead0ff0 C-L (default)
+ | |
+ o | 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ | | o 51c544a58128 C-C (default)
+ | |/
+ o | a9149a1428e2 C-B (default)
+ | |
+ | o 98217d5a1659 C-A (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+racing commit push a new head behind another named branch
+---------------------------------------------------------
+
+non-continuous branches are a valid case; we test for them.
+
+# b (branch default)
+# |
+# o (branch foo)
+# |
+# | a (raced, branch default)
+# |/
+# * (branch foo)
+# |
+# * (branch default)
+
+(resync-all + other branch)
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 0 changes to 0 files
+ (run 'hg update' to get a working copy)
+
+(creates named branch on head)
+
+ $ hg -R ./server/ up 'desc("C-N")'
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg -R ./server/ branch other
+ marked working directory as branch other
+ $ hg -R ./server/ ci -m "C-Z"
+ $ hg -R ./server/ up null
+ 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
+
+(sync client)
+
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 0 changes to 0 files
+ (run 'hg update' to get a working copy)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+
+ $ hg -R server graph
+ o 55a6f1c01b48 C-Z (other)
+ |
+ o 866a66e18630 C-N (default)
+ |\
+ +---o 6fd3090135df C-M (default)
+ | |
+ | o cac2cead0ff0 C-L (default)
+ | |
+ o | be705100c623 C-K (default)
+ |\|
+ o | d603e2c0cdd7 C-E (default)
+ | |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ +---o 51c544a58128 C-C (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ | |
+ o | 98217d5a1659 C-A (default)
+ |/
+ o 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets
+
+(update default head through another named branch one)
+
+ $ hg -R client-other/ up 'desc("C-Z")'
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo aaa >> client-other/a
+ $ hg -R client-other/ commit -m "C-O"
+ $ echo aaa >> client-other/a
+ $ hg -R client-other/ branch --force default
+ marked working directory as branch default
+ $ hg -R client-other/ commit -m "C-P"
+ created new head
+
+(update default head)
+
+ $ hg -R client-racy/ up 'desc("C-Z")'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo bbb >> client-other/a
+ $ hg -R client-racy/ branch --force default
+ marked working directory as branch default
+ $ hg -R client-racy/ commit -m "C-Q"
+ created new head
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -fr 'tip' --new-branch
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 2 changesets with 1 changes to 1 files
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server graph
+ o 1b58ee3f79e5 C-P (default)
+ |
+ o d0a85b2252a9 C-O (other)
+ |
+ o 55a6f1c01b48 C-Z (other)
+ |
+ o 866a66e18630 C-N (default)
+ |\
+ +---o 6fd3090135df C-M (default)
+ | |
+ | o cac2cead0ff0 C-L (default)
+ | |
+ o | be705100c623 C-K (default)
+ |\|
+ o | d603e2c0cdd7 C-E (default)
+ | |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ +---o 51c544a58128 C-C (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ | |
+ o | 98217d5a1659 C-A (default)
+ |/
+ o 842e2fac6304 C-ROOT (default)
+
+
+raced commit push a new head behind another named branch
+---------------------------------------------------------
+
+non-continuous branches are a valid case; we test for them.
+
+# b (raced branch default)
+# |
+# o (branch foo)
+# |
+# | a (branch default)
+# |/
+# * (branch foo)
+# |
+# * (branch default)
+
+(resync-all)
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 0 changes to 0 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 0 changes to 0 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+
+ $ hg -R server graph
+ o b0ee3d6f51bc C-Q (default)
+ |
+ | o 1b58ee3f79e5 C-P (default)
+ | |
+ | o d0a85b2252a9 C-O (other)
+ |/
+ o 55a6f1c01b48 C-Z (other)
+ |
+ o 866a66e18630 C-N (default)
+ |\
+ +---o 6fd3090135df C-M (default)
+ | |
+ | o cac2cead0ff0 C-L (default)
+ | |
+ o | be705100c623 C-K (default)
+ |\|
+ o | d603e2c0cdd7 C-E (default)
+ | |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ +---o 51c544a58128 C-C (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ | |
+ o | 98217d5a1659 C-A (default)
+ |/
+ o 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets
+
+(update 'other' named branch head)
+
+ $ hg -R client-other/ up 'desc("C-P")'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo aaa >> client-other/a
+ $ hg -R client-other/ branch --force other
+ marked working directory as branch other
+ $ hg -R client-other/ commit -m "C-R"
+ created new head
+
+(update 'other' named branch through a 'default' changeset)
+
+ $ hg -R client-racy/ up 'desc("C-P")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo bbb >> client-racy/a
+ $ hg -R client-racy/ commit -m "C-S"
+ $ echo bbb >> client-racy/a
+ $ hg -R client-racy/ branch --force other
+ marked working directory as branch other
+ $ hg -R client-racy/ commit -m "C-T"
+ created new head
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -fr 'tip' --new-branch
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server graph
+ o de7b9e2ba3f6 C-R (other)
+ |
+ o 1b58ee3f79e5 C-P (default)
+ |
+ o d0a85b2252a9 C-O (other)
+ |
+ | o b0ee3d6f51bc C-Q (default)
+ |/
+ o 55a6f1c01b48 C-Z (other)
+ |
+ o 866a66e18630 C-N (default)
+ |\
+ +---o 6fd3090135df C-M (default)
+ | |
+ | o cac2cead0ff0 C-L (default)
+ | |
+ o | be705100c623 C-K (default)
+ |\|
+ o | d603e2c0cdd7 C-E (default)
+ | |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ +---o 51c544a58128 C-C (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ | |
+ o | 98217d5a1659 C-A (default)
+ |/
+ o 842e2fac6304 C-ROOT (default)
+
+
+raced commit push a new head obsoleting the one touched by the racing push
+--------------------------------------------------------------------------
+
+# b (racing)
+# |
+# ø⇠◔ a (raced)
+# |/
+# *
+
+(resync-all)
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 2 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 2 changes to 1 files (+1 heads)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+
+ $ hg -R server graph
+ o 3d57ed3c1091 C-T (other)
+ |
+ o 2efd43f7b5ba C-S (default)
+ |
+ | o de7b9e2ba3f6 C-R (other)
+ |/
+ o 1b58ee3f79e5 C-P (default)
+ |
+ o d0a85b2252a9 C-O (other)
+ |
+ | o b0ee3d6f51bc C-Q (default)
+ |/
+ o 55a6f1c01b48 C-Z (other)
+ |
+ o 866a66e18630 C-N (default)
+ |\
+ +---o 6fd3090135df C-M (default)
+ | |
+ | o cac2cead0ff0 C-L (default)
+ | |
+ o | be705100c623 C-K (default)
+ |\|
+ o | d603e2c0cdd7 C-E (default)
+ | |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ +---o 51c544a58128 C-C (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ | |
+ o | 98217d5a1659 C-A (default)
+ |/
+ o 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets and markers
+
+(continue existing head)
+
+ $ hg -R client-other/ up 'desc("C-Q")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo aaa >> client-other/a
+ $ hg -R client-other/ commit -m "C-U"
+
+(new topo branch obsoleting that same head)
+
+ $ hg -R client-racy/ up 'desc("C-Z")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo bbb >> client-racy/a
+ $ hg -R client-racy/ branch --force default
+ marked working directory as branch default
+ $ hg -R client-racy/ commit -m "C-V"
+ created new head
+ $ ID_Q=`hg -R client-racy log -T '{node}\n' -r 'desc("C-Q")'`
+ $ ID_V=`hg -R client-racy log -T '{node}\n' -r 'desc("C-V")'`
+ $ hg -R client-racy debugobsolete $ID_Q $ID_V
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -fr 'tip' --new-branch
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 0 changes to 0 files
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server debugobsolete
+ $ hg -R server graph
+ o a98a47d8b85b C-U (default)
+ |
+ o b0ee3d6f51bc C-Q (default)
+ |
+ | o 3d57ed3c1091 C-T (other)
+ | |
+ | o 2efd43f7b5ba C-S (default)
+ | |
+ | | o de7b9e2ba3f6 C-R (other)
+ | |/
+ | o 1b58ee3f79e5 C-P (default)
+ | |
+ | o d0a85b2252a9 C-O (other)
+ |/
+ o 55a6f1c01b48 C-Z (other)
+ |
+ o 866a66e18630 C-N (default)
+ |\
+ +---o 6fd3090135df C-M (default)
+ | |
+ | o cac2cead0ff0 C-L (default)
+ | |
+ o | be705100c623 C-K (default)
+ |\|
+ o | d603e2c0cdd7 C-E (default)
+ | |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ +---o 51c544a58128 C-C (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ | |
+ o | 98217d5a1659 C-A (default)
+ |/
+ o 842e2fac6304 C-ROOT (default)
+
+
+racing commit push a new head obsoleting the one touched by the raced push
+--------------------------------------------------------------------------
+
+(mirror test case of the previous one)
+
+# a (raced branch default)
+# |
+# ø⇠◔ b (racing)
+# |/
+# *
+
+(resync-all)
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ 1 new obsolescence markers
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ 1 new obsolescence markers
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 0 changes to 0 files
+ (run 'hg update' to get a working copy)
+
+ $ hg -R server debugobsolete
+ b0ee3d6f51bc4c0ca6d4f2907708027a6c376233 720c5163ecf64dcc6216bee2d62bf3edb1882499 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ $ hg -R server graph
+ o 720c5163ecf6 C-V (default)
+ |
+ | o a98a47d8b85b C-U (default)
+ | |
+ | x b0ee3d6f51bc C-Q (default)
+ |/
+ | o 3d57ed3c1091 C-T (other)
+ | |
+ | o 2efd43f7b5ba C-S (default)
+ | |
+ | | o de7b9e2ba3f6 C-R (other)
+ | |/
+ | o 1b58ee3f79e5 C-P (default)
+ | |
+ | o d0a85b2252a9 C-O (other)
+ |/
+ o 55a6f1c01b48 C-Z (other)
+ |
+ o 866a66e18630 C-N (default)
+ |\
+ +---o 6fd3090135df C-M (default)
+ | |
+ | o cac2cead0ff0 C-L (default)
+ | |
+ o | be705100c623 C-K (default)
+ |\|
+ o | d603e2c0cdd7 C-E (default)
+ | |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ +---o 51c544a58128 C-C (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ | |
+ o | 98217d5a1659 C-A (default)
+ |/
+ o 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets and markers
+
+(new topo branch obsoleting that same head)
+
+ $ hg -R client-other/ up 'desc("C-Q")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo bbb >> client-other/a
+ $ hg -R client-other/ branch --force default
+ marked working directory as branch default
+ $ hg -R client-other/ commit -m "C-W"
+ created new head
+ $ ID_V=`hg -R client-other log -T '{node}\n' -r 'desc("C-V")'`
+ $ ID_W=`hg -R client-other log -T '{node}\n' -r 'desc("C-W")'`
+ $ hg -R client-other debugobsolete $ID_V $ID_W
+
+(continue the same head)
+
+ $ echo aaa >> client-racy/a
+ $ hg -R client-racy/ commit -m "C-X"
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -fr 'tip' --new-branch
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 0 changes to 1 files (+1 heads)
+ remote: 1 new obsolescence markers
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server debugobsolete
+ b0ee3d6f51bc4c0ca6d4f2907708027a6c376233 720c5163ecf64dcc6216bee2d62bf3edb1882499 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 720c5163ecf64dcc6216bee2d62bf3edb1882499 39bc0598afe90ab18da460bafecc0fa953b77596 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ $ hg -R server graph --hidden
+ o 39bc0598afe9 C-W (default)
+ |
+ | o a98a47d8b85b C-U (default)
+ |/
+ x b0ee3d6f51bc C-Q (default)
+ |
+ | o 3d57ed3c1091 C-T (other)
+ | |
+ | o 2efd43f7b5ba C-S (default)
+ | |
+ | | o de7b9e2ba3f6 C-R (other)
+ | |/
+ | o 1b58ee3f79e5 C-P (default)
+ | |
+ | o d0a85b2252a9 C-O (other)
+ |/
+ | x 720c5163ecf6 C-V (default)
+ |/
+ o 55a6f1c01b48 C-Z (other)
+ |
+ o 866a66e18630 C-N (default)
+ |\
+ +---o 6fd3090135df C-M (default)
+ | |
+ | o cac2cead0ff0 C-L (default)
+ | |
+ o | be705100c623 C-K (default)
+ |\|
+ o | d603e2c0cdd7 C-E (default)
+ | |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ +---o 51c544a58128 C-C (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ | |
+ o | 98217d5a1659 C-A (default)
+ |/
+ o 842e2fac6304 C-ROOT (default)
+
--- a/tests/test-push-warn.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-push-warn.t Tue Jun 20 16:33:46 2017 -0400
@@ -41,7 +41,7 @@
searching for changes
taking quick initial sample
query 2; still undecided: 1, sample size is: 1
- 2 total queries
+ 2 total queries in *.????s (glob)
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
@@ -418,7 +418,7 @@
adding c
created new head
- $ for i in `python $TESTDIR/seq.py 3`; do hg -R h up -q 0; echo $i > h/b; hg -R h ci -qAm$i; done
+ $ for i in `$PYTHON $TESTDIR/seq.py 3`; do hg -R h up -q 0; echo $i > h/b; hg -R h ci -qAm$i; done
$ hg -R i push h
pushing to h
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-py3-commands.t Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,231 @@
+#require py3exe
+
+This test helps keep track of which commands we can run on
+Python 3 and what kinds of errors come up.
+The full traceback is hidden to have a stable output.
+ $ HGBIN=`which hg`
+
+ $ for cmd in version debuginstall ; do
+ > echo $cmd
 + > $PYTHON3 $HGBIN $cmd 2>&1 | tail -1
+ > done
+ version
+ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ debuginstall
+ no problems detected
+
+#if test-repo
+Make a clone so that any features in the developer's .hg/hgrc that
+might confuse Python 3 don't break this test. When we can do commit in
+Python 3, we'll stop doing this. We use e76ed1e480ef for the clone
+because it has different files than 273ce12ad8f1, so we can test both
+`files` from dirstate and `files` loaded from a specific revision.
+
+ $ hg clone -r e76ed1e480ef "`dirname "$TESTDIR"`" testrepo 2>&1 | tail -1
+ 15 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Test using -R, which exercises some URL code:
+ $ $PYTHON3 $HGBIN -R testrepo files -r 273ce12ad8f1 | tail -1
+ testrepo/tkmerge
+
+Now prove `hg files` is reading the whole manifest. We have to grep
+out some potential warnings that come from hgrc as yet.
+ $ cd testrepo
+ $ $PYTHON3 $HGBIN files -r 273ce12ad8f1
+ .hgignore
+ PKG-INFO
+ README
+ hg
+ mercurial/__init__.py
+ mercurial/byterange.py
+ mercurial/fancyopts.py
+ mercurial/hg.py
+ mercurial/mdiff.py
+ mercurial/revlog.py
+ mercurial/transaction.py
+ notes.txt
+ setup.py
+ tkmerge
+
+ $ $PYTHON3 $HGBIN files -r 273ce12ad8f1 | wc -l
+ \s*14 (re)
+ $ $PYTHON3 $HGBIN files | wc -l
+ \s*15 (re)
+
+Test if log-like commands work:
+
+ $ $PYTHON3 $HGBIN tip
+ changeset: 10:e76ed1e480ef
+ tag: tip
+ user: oxymoron@cinder.waste.org
+ date: Tue May 03 23:37:43 2005 -0800
+ summary: Fix linking of changeset revs when merging
+
+
+ $ $PYTHON3 $HGBIN log -r0
+ changeset: 0:9117c6561b0b
+ user: mpm@selenic.com
+ date: Tue May 03 13:16:10 2005 -0800
+ summary: Add back links from file revisions to changeset revisions
+
+
+ $ cd ..
+#endif
+
+Test if `hg config` works:
+
+ $ $PYTHON3 $HGBIN config
+ devel.all-warnings=true
+ devel.default-date=0 0
+ largefiles.usercache=$TESTTMP/.cache/largefiles
+ ui.slash=True
+ ui.interactive=False
+ ui.mergemarkers=detailed
+ ui.promptecho=True
+ web.address=localhost
+ web.ipv6=False
+
+ $ cat > included-hgrc <<EOF
+ > [extensions]
+ > babar = imaginary_elephant
+ > EOF
+ $ cat >> $HGRCPATH <<EOF
+ > %include $TESTTMP/included-hgrc
+ > EOF
+ $ $PYTHON3 $HGBIN version | tail -1
+ *** failed to import extension babar from imaginary_elephant: *: 'imaginary_elephant' (glob)
+ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+ $ rm included-hgrc
+ $ touch included-hgrc
+
+Test bytes-ness of policy.policy with HGMODULEPOLICY
+
+ $ HGMODULEPOLICY=py
+ $ export HGMODULEPOLICY
 + $ $PYTHON3 `which hg` debuginstall 2>&1 | tail -1
+ no problems detected
+
+`hg init` can create empty repos
+`hg status` works fine
+`hg summary` also works!
+
+ $ $PYTHON3 `which hg` init py3repo
+ $ cd py3repo
+ $ echo "This is the file 'iota'." > iota
+ $ $PYTHON3 $HGBIN status
+ ? iota
+ $ $PYTHON3 $HGBIN add iota
+ $ $PYTHON3 $HGBIN status
+ A iota
+ $ hg diff --nodates --git
+ diff --git a/iota b/iota
+ new file mode 100644
+ --- /dev/null
+ +++ b/iota
+ @@ -0,0 +1,1 @@
+ +This is the file 'iota'.
+ $ $PYTHON3 $HGBIN commit --message 'commit performed in Python 3'
+ $ $PYTHON3 $HGBIN status
+
+ $ mkdir A
+ $ echo "This is the file 'mu'." > A/mu
+ $ $PYTHON3 $HGBIN addremove
+ adding A/mu
+ $ $PYTHON3 $HGBIN status
+ A A/mu
+ $ HGEDITOR='echo message > ' $PYTHON3 $HGBIN commit
+ $ $PYTHON3 $HGBIN status
 + $ $PYTHON3 $HGBIN summary
+ parent: 1:e1e9167203d4 tip
+ message
+ branch: default
+ commit: (clean)
+ update: (current)
+ phases: 2 draft
+
+Test weird unicode-vs-bytes stuff
+
+ $ $PYTHON3 $HGBIN help | egrep -v '^ |^$'
+ Mercurial Distributed SCM
+ list of commands:
+ additional help topics:
+ (use 'hg help -v' to show built-in aliases and global options)
+
+ $ $PYTHON3 $HGBIN help help | egrep -v '^ |^$'
+ hg help [-ecks] [TOPIC]
+ show help for a given topic or a help overview
+ options ([+] can be repeated):
+ (some details hidden, use --verbose to show complete help)
+
+ $ $PYTHON3 $HGBIN help -k notopic
+ abort: no matches
+ (try 'hg help' for a list of topics)
+ [255]
+
+Prove the repo is valid using the Python 2 `hg`:
+ $ hg verify
+ checking changesets
+ checking manifests
+ crosschecking files in changesets and manifests
+ checking files
+ 2 files, 2 changesets, 2 total revisions
+ $ hg log
+ changeset: 1:e1e9167203d4
+ tag: tip
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: message
+
+ changeset: 0:71c96e924262
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: commit performed in Python 3
+
+
+ $ $PYTHON3 $HGBIN log -G
+ @ changeset: 1:e1e9167203d4
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: message
+ |
+ o changeset: 0:71c96e924262
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: commit performed in Python 3
+
+ $ $PYTHON3 $HGBIN log -Tjson
+ [
+ {
+ "rev": 1,
+ "node": "e1e9167203d450ca2f558af628955b5f5afd4489",
+ "branch": "default",
+ "phase": "draft",
+ "user": "test",
+ "date": [0, 0],
+ "desc": "message",
+ "bookmarks": [],
+ "tags": ["tip"],
+ "parents": ["71c96e924262969ff0d8d3d695b0f75412ccc3d8"]
+ },
+ {
+ "rev": 0,
+ "node": "71c96e924262969ff0d8d3d695b0f75412ccc3d8",
+ "branch": "default",
+ "phase": "draft",
+ "user": "test",
+ "date": [0, 0],
+ "desc": "commit performed in Python 3",
+ "bookmarks": [],
+ "tags": [],
+ "parents": ["0000000000000000000000000000000000000000"]
+ }
+ ]
+
+Show that update works now!
+
+ $ $PYTHON3 $HGBIN up 0
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ $PYTHON3 $HGBIN identify
+ 71c96e924262
--- a/tests/test-rebase-base.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-rebase-base.t Tue Jun 20 16:33:46 2017 -0400
@@ -298,18 +298,6 @@
|
o 0: M0
-Mixed rebasable and non-rebasable bases (unresolved, issue5422):
-
- $ rebasewithdag -b C+D -d B <<'EOS'
- > D
- > /
- > B C
- > |/
- > A
- > EOS
- nothing to rebase
- [1]
-
Disconnected graph:
$ rebasewithdag -b B -d Z <<'EOS'
--- a/tests/test-rebase-conflicts.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-rebase-conflicts.t Tue Jun 20 16:33:46 2017 -0400
@@ -250,7 +250,7 @@
rebased as 19c888675e13
rebasing 10:2f2496ddf49d "merge" (tip)
future parents are 11 and 7
- already in target
+ already in destination
merge against 10:2f2496ddf49d
detach base 9:e31216eec445
searching for copies back to rev 3
@@ -268,6 +268,7 @@
rebased as 2a7f09cac94c
rebase merging completed
rebase status stored
+ updating the branch cache
update back to initial working directory parent
resolving manifests
branchmerge: False, force: False, partial: False
@@ -303,10 +304,9 @@
added 2 changesets with 2 changes to 1 files
bundle2-input-part: total payload size 1686
bundle2-input-bundle: 0 parts total
+ updating the branch cache
invalid branchheads cache (served): tip differs
- history modification detected - truncating revision branch cache to revision 9
rebase completed
- truncating cache/rbc-revs-v1 to 72
Test minimization of merge conflicts
$ hg up -q null
--- a/tests/test-rebase-interruptions.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-rebase-interruptions.t Tue Jun 20 16:33:46 2017 -0400
@@ -186,7 +186,7 @@
Abort the rebasing:
$ hg rebase --abort
- warning: new changesets detected on target branch, can't strip
+ warning: new changesets detected on destination branch, can't strip
rebase aborted
$ hg tglog
@@ -272,3 +272,34 @@
o 0:public 'A'
$ cd ..
+
+Make sure merge state is cleaned up after a no-op rebase merge (issue5494)
+ $ hg init repo
+ $ cd repo
+ $ echo a > a
+ $ hg commit -qAm base
+ $ echo b >> a
+ $ hg commit -qm b
+ $ hg up '.^'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo c >> a
+ $ hg commit -qm c
+ $ hg rebase -s 1 -d 2 --noninteractive
+ rebasing 1:fdaca8533b86 "b"
+ merging a
+ warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+ unresolved conflicts (see hg resolve, then hg rebase --continue)
+ [1]
+ $ echo a > a
+ $ echo c >> a
+ $ hg resolve --mark a
+ (no more unresolved files)
+ continue: hg rebase --continue
+ $ hg rebase --continue
+ rebasing 1:fdaca8533b86 "b"
+ note: rebase of 1:fdaca8533b86 created no changes to commit
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/fdaca8533b86-7fd70513-backup.hg (glob)
+ $ hg resolve --list
+ $ test -f .hg/merge
+ [1]
+
--- a/tests/test-rebase-obsolete.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-rebase-obsolete.t Tue Jun 20 16:33:46 2017 -0400
@@ -902,7 +902,7 @@
$ hg up 9520eea781bc
1 files updated, 0 files merged, 2 files removed, 0 files unresolved
$ echo 1 >> E
- $ hg commit --amend -m "E'"
+ $ hg commit --amend -m "E'" -d "0 0"
$ hg log -G
@ 9:69abe8906104 E'
|
@@ -967,7 +967,7 @@
$ hg up 2 && hg log -r . # working dir is at rev 2 again
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
2:1e9a3c00cbe9 b (no-eol)
- $ hg rebase -r 2 -d 3
+ $ hg rebase -r 2 -d 3 --config experimental.evolution.track-operation=1
note: not rebasing 2:1e9a3c00cbe9 "b" (mybook), already in destination as 3:be1832deae9a "b"
Check that working directory was updated to rev 3 although rev 2 was skipped
during the rebase operation
@@ -978,3 +978,5 @@
during the rebase operation
$ hg bookmarks
mybook 3:be1832deae9a
+ $ hg debugobsolete --rev tip
+ 1e9a3c00cbe90d236ac05ef61efcc5e40b7412bc be1832deae9ac531caa7438b8dcf6055a122cd8e 0 (*) {'user': 'test'} (glob)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-partial.t Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,95 @@
+Tests rebasing with part of the rebase set already in the
+destination (issue5422)
+
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > rebase=
+ > drawdag=$TESTDIR/drawdag.py
+ >
+ > [experimental]
+ > evolution=createmarkers,allowunstable
+ >
+ > [alias]
+ > tglog = log -G --template "{rev}: {desc}"
+ > EOF
+
+ $ rebasewithdag() {
+ > N=`$PYTHON -c "print($N+1)"`
+ > hg init repo$N && cd repo$N
+ > hg debugdrawdag
+ > hg rebase "$@" > _rebasetmp
+ > r=$?
+ > grep -v 'saved backup bundle' _rebasetmp
+ > [ $r -eq 0 ] && hg tglog
+ > cd ..
+ > return $r
+ > }
+
+Rebase two commits, of which one is already in the right place
+
+ $ rebasewithdag -r C+D -d B <<EOF
+ > C
+ > |
+ > B D
+ > |/
+ > A
+ > EOF
+ rebasing 2:b18e25de2cf5 "D" (D)
+ already rebased 3:26805aba1e60 "C" (C tip)
+ o 4: D
+ |
+ | o 3: C
+ |/
+ | x 2: D
+ | |
+ o | 1: B
+ |/
+ o 0: A
+
+Can collapse commits even if one is already in the right place
+
+ $ rebasewithdag --collapse -r C+D -d B <<EOF
+ > C
+ > |
+ > B D
+ > |/
+ > A
+ > EOF
+ rebasing 2:b18e25de2cf5 "D" (D)
+ rebasing 3:26805aba1e60 "C" (C tip)
+ o 4: Collapsed revision
+ | * D
+ | * C
+ | x 3: C
+ |/
+ | x 2: D
+ | |
+ o | 1: B
+ |/
+ o 0: A
+
+Rebase with "holes". The commits after the hole should end up on the parent of
+the hole (B below), not on top of the destination (A).
+
+ $ rebasewithdag -r B+D -d A <<EOF
+ > D
+ > |
+ > C
+ > |
+ > B
+ > |
+ > A
+ > EOF
+ already rebased 1:112478962961 "B" (B)
+ not rebasing ignored 2:26805aba1e60 "C" (C)
+ rebasing 3:f585351a92f8 "D" (D tip)
+ o 4: D
+ |
+ | x 3: D
+ | |
+ | o 2: C
+ |/
+ o 1: B
+ |
+ o 0: A
+
--- a/tests/test-rebase-pull.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-rebase-pull.t Tue Jun 20 16:33:46 2017 -0400
@@ -347,6 +347,7 @@
added 1 changesets with 1 changes to 1 files (+1 heads)
nothing to rebase - updating instead
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "65bc164c1d9b: R6"
1 other heads for branch "default"
$ hg tglog
@ 9: 'R6'
--- a/tests/test-rebuildstate.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-rebuildstate.t Tue Jun 20 16:33:46 2017 -0400
@@ -1,8 +1,8 @@
$ cat > adddrop.py <<EOF
- > from mercurial import cmdutil
+ > from mercurial import registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> @command('debugadddrop',
> [('', 'drop', False, 'drop file from dirstate', 'FILE'),
> ('', 'normal-lookup', False, 'add file to dirstate', 'FILE')],
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-releasenotes-formatting.t Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,326 @@
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > releasenotes=
+ > EOF
+
+ $ hg init simple-repo
+ $ cd simple-repo
+
+A fix with a single line results in a bullet point in the appropriate section
+
+ $ touch fix1
+ $ hg -q commit -A -l - << EOF
+ > single line fix
+ >
+ > .. fix::
+ >
+ > Simple fix with a single line content entry.
+ > EOF
+
+ $ hg releasenotes -r . $TESTTMP/relnotes-single-line
+
+ $ cat $TESTTMP/relnotes-single-line
+ Bug Fixes
+ =========
+
+ * Simple fix with a single line content entry.
+
+A fix with multiple lines is handled correctly
+
+ $ touch fix2
+ $ hg -q commit -A -l - << EOF
+ > multi line fix
+ >
+ > .. fix::
+ >
+ > First line of fix entry.
+ > A line after it without a space.
+ >
+ > A new paragraph in the fix entry. And this is a really long line. It goes on for a while.
+ > And it wraps around to a new paragraph.
+ > EOF
+
+ $ hg releasenotes -r . $TESTTMP/relnotes-multi-line
+ $ cat $TESTTMP/relnotes-multi-line
+ Bug Fixes
+ =========
+
+ * First line of fix entry. A line after it without a space.
+
+ A new paragraph in the fix entry. And this is a really long line. It goes on
+ for a while. And it wraps around to a new paragraph.
+
+A release note with a title results in a sub-section being written
+
+ $ touch fix3
+ $ hg -q commit -A -l - << EOF
+ > fix with title
+ >
+ > .. fix:: Fix Title
+ >
+ > First line of fix with title.
+ >
+ > Another paragraph of fix with title. But this is a paragraph
+ > with multiple lines.
+ > EOF
+
+ $ hg releasenotes -r . $TESTTMP/relnotes-fix-with-title
+ $ cat $TESTTMP/relnotes-fix-with-title
+ Bug Fixes
+ =========
+
+ Fix Title
+ ---------
+
+ First line of fix with title.
+
+ Another paragraph of fix with title. But this is a paragraph with multiple
+ lines.
+
+ $ cd ..
+
+Formatting of multiple bullet points works
+
+ $ hg init multiple-bullets
+ $ cd multiple-bullets
+ $ touch fix1
+ $ hg -q commit -A -l - << EOF
+ > commit 1
+ >
+ > .. fix::
+ >
+ > first fix
+ > EOF
+
+ $ touch fix2
+ $ hg -q commit -A -l - << EOF
+ > commit 2
+ >
+ > .. fix::
+ >
+ > second fix
+ >
+ > Second paragraph of second fix.
+ > EOF
+
+ $ touch fix3
+ $ hg -q commit -A -l - << EOF
+ > commit 3
+ >
+ > .. fix::
+ >
+ > third fix
+ > EOF
+
+ $ hg releasenotes -r 'all()' $TESTTMP/relnotes-multiple-bullets
+ $ cat $TESTTMP/relnotes-multiple-bullets
+ Bug Fixes
+ =========
+
+ * first fix
+
+ * second fix
+
+ Second paragraph of second fix.
+
+ * third fix
+
+ $ cd ..
+
+Formatting of multiple sections works
+
+ $ hg init multiple-sections
+ $ cd multiple-sections
+ $ touch fix1
+ $ hg -q commit -A -l - << EOF
+ > commit 1
+ >
+ > .. fix::
+ >
+ > first fix
+ > EOF
+
+ $ touch feature1
+ $ hg -q commit -A -l - << EOF
+ > commit 2
+ >
+ > .. feature::
+ >
+ > description of the new feature
+ > EOF
+
+ $ touch fix2
+ $ hg -q commit -A -l - << EOF
+ > commit 3
+ >
+ > .. fix::
+ >
+ > second fix
+ > EOF
+
+ $ hg releasenotes -r 'all()' $TESTTMP/relnotes-multiple-sections
+ $ cat $TESTTMP/relnotes-multiple-sections
+ New Features
+ ============
+
+ * description of the new feature
+
+ Bug Fixes
+ =========
+
+ * first fix
+
+ * second fix
+
+ $ cd ..
+
+Section with subsections and bullets
+
+ $ hg init multiple-subsections
+ $ cd multiple-subsections
+
+ $ touch fix1
+ $ hg -q commit -A -l - << EOF
+ > commit 1
+ >
+ > .. fix:: Title of First Fix
+ >
+ > First paragraph of first fix.
+ >
+ > Second paragraph of first fix.
+ > EOF
+
+ $ touch fix2
+ $ hg -q commit -A -l - << EOF
+ > commit 2
+ >
+ > .. fix:: Title of Second Fix
+ >
+ > First paragraph of second fix.
+ >
+ > Second paragraph of second fix.
+ > EOF
+
+ $ hg releasenotes -r 'all()' $TESTTMP/relnotes-multiple-subsections
+ $ cat $TESTTMP/relnotes-multiple-subsections
+ Bug Fixes
+ =========
+
+ Title of First Fix
+ ------------------
+
+ First paragraph of first fix.
+
+ Second paragraph of first fix.
+
+ Title of Second Fix
+ -------------------
+
+ First paragraph of second fix.
+
+ Second paragraph of second fix.
+
+Now add bullet points to sections having sub-sections
+
+ $ touch fix3
+ $ hg -q commit -A -l - << EOF
+ > commit 3
+ >
+ > .. fix::
+ >
+ > Short summary of fix 3
+ > EOF
+
+ $ hg releasenotes -r 'all()' $TESTTMP/relnotes-multiple-subsections-with-bullets
+ $ cat $TESTTMP/relnotes-multiple-subsections-with-bullets
+ Bug Fixes
+ =========
+
+ Title of First Fix
+ ------------------
+
+ First paragraph of first fix.
+
+ Second paragraph of first fix.
+
+ Title of Second Fix
+ -------------------
+
+ First paragraph of second fix.
+
+ Second paragraph of second fix.
+
+ Other Changes
+ -------------
+
+ * Short summary of fix 3
+
+Multiple 'Other Changes' sub-sections for every section
+
+ $ hg init multiple-otherchanges
+ $ cd multiple-otherchanges
+
+ $ touch fix1
+ $ hg -q commit -A -l - << EOF
+ > commit 1
+ >
+ > .. fix:: Title of First Fix
+ >
+ > First paragraph of fix 1.
+ > EOF
+
+ $ touch feature1
+ $ hg -q commit -A -l - << EOF
+ > commit 2
+ >
+ > .. feature:: Title of First Feature
+ >
+ > First paragraph of feature 1.
+ > EOF
+
+ $ touch feature2
+ $ hg -q commit -A -l - << EOF
+ > commit 3
+ >
+ > .. feature::
+ >
+ > Short summary of feature 2.
+ > EOF
+
+ $ touch fix2
+ $ hg -q commit -A -l - << EOF
+ > commit 4
+ >
+ > .. fix::
+ >
+ > Short summary of fix 2
+ > EOF
+
+ $ hg releasenotes -r 'all()' $TESTTMP/relnotes-multiple-otherchanges
+ $ cat $TESTTMP/relnotes-multiple-otherchanges
+ New Features
+ ============
+
+ Title of First Feature
+ ----------------------
+
+ First paragraph of feature 1.
+
+ Other Changes
+ -------------
+
+ * Short summary of feature 2.
+
+ Bug Fixes
+ =========
+
+ Title of First Fix
+ ------------------
+
+ First paragraph of fix 1.
+
+ Other Changes
+ -------------
+
+ * Short summary of fix 2
+
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-releasenotes-merging.t Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,163 @@
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > releasenotes=
+ > EOF
+
+ $ hg init simple-repo
+ $ cd simple-repo
+
+A fix directive from commit message is added to release notes
+
+ $ touch fix1
+ $ hg -q commit -A -l - << EOF
+ > commit 1
+ >
+ > .. fix::
+ >
+ > Fix from commit message.
+ > EOF
+
+ $ cat >> $TESTTMP/single-fix-bullet << EOF
+ > Bug Fixes
+ > =========
+ >
+ > * Fix from release notes.
+ > EOF
+
+ $ hg releasenotes -r . $TESTTMP/single-fix-bullet
+
+ $ cat $TESTTMP/single-fix-bullet
+ Bug Fixes
+ =========
+
+ * Fix from release notes.
+
+ * Fix from commit message.
+
+Processing again will no-op
+TODO this is buggy
+
+ $ hg releasenotes -r . $TESTTMP/single-fix-bullet
+
+ $ cat $TESTTMP/single-fix-bullet
+ Bug Fixes
+ =========
+
+ * Fix from release notes.
+
+ Fix from commit message.
+
+ * Fix from commit message.
+
+ $ cd ..
+
+Sections are unioned
+
+ $ hg init subsections
+ $ cd subsections
+ $ touch fix1
+ $ hg -q commit -A -l - << EOF
+ > Commit 1
+ >
+ > .. feature:: Commit Message Feature
+ >
+ > This describes a feature from a commit message.
+ > EOF
+
+ $ cat >> $TESTTMP/single-feature-section << EOF
+ > New Features
+ > ============
+ >
+ > Notes Feature
+ > -------------
+ >
+ > This describes a feature from a release notes file.
+ > EOF
+
+ $ hg releasenotes -r . $TESTTMP/single-feature-section
+
+ $ cat $TESTTMP/single-feature-section
+ New Features
+ ============
+
+ Notes Feature
+ -------------
+
+ This describes a feature from a release notes file.
+
+ Commit Message Feature
+ ----------------------
+
+ This describes a feature from a commit message.
+
+Doing it again won't add another section
+
+ $ hg releasenotes -r . $TESTTMP/single-feature-section
+ Commit Message Feature already exists in feature section; ignoring
+
+ $ cat $TESTTMP/single-feature-section
+ New Features
+ ============
+
+ Notes Feature
+ -------------
+
+ This describes a feature from a release notes file.
+
+ Commit Message Feature
+ ----------------------
+
+ This describes a feature from a commit message.
+
+ $ cd ..
+
+Bullets don't merge properly
+
+ $ hg init bullets
+ $ cd bullets
+ $ touch fix1
+ $ hg -q commit -A -l - << EOF
+ > commit 1
+ >
+ > .. fix::
+ >
+ > this is fix1.
+ > EOF
+
+ $ touch fix2
+ $ hg -q commit -A -l - << EOF
+ > commit 2
+ >
+ > .. fix::
+ >
+ > this is fix2.
+ > EOF
+
+ $ hg releasenotes -r 'all()' $TESTTMP/relnotes-bullet-problem
+ $ cat $TESTTMP/relnotes-bullet-problem
+ Bug Fixes
+ =========
+
+ * this is fix1.
+
+ * this is fix2.
+ $ touch fix3
+ $ hg -q commit -A -l - << EOF
+ > commit 3
+ >
+ > .. fix::
+ >
+ > this is fix3.
+ > EOF
+
+ $ hg releasenotes -r . $TESTTMP/relnotes-bullet-problem
+ $ cat $TESTTMP/relnotes-bullet-problem
+ Bug Fixes
+ =========
+
+ * this is fix1.
+
+ this is fix2.
+
+ * this is fix3.
+
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-releasenotes-parsing.t Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,169 @@
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > releasenotes=
+ > EOF
+
+Bullet point with a single item spanning a single line
+
+ $ hg debugparsereleasenotes - << EOF
+ > New Features
+ > ============
+ >
+ > * Bullet point item with a single line
+ > EOF
+ section: feature
+ bullet point:
+ paragraph: Bullet point item with a single line
+
+Bullet point that spans multiple lines.
+
+ $ hg debugparsereleasenotes - << EOF
+ > New Features
+ > ============
+ >
+ > * Bullet point with a paragraph
+ > that spans multiple lines.
+ > EOF
+ section: feature
+ bullet point:
+ paragraph: Bullet point with a paragraph that spans multiple lines.
+
+ $ hg debugparsereleasenotes - << EOF
+ > New Features
+ > ============
+ >
+ > * Bullet point with a paragraph
+ > that spans multiple lines.
+ >
+ > And has an empty line between lines too.
+ > With a line cuddling that.
+ > EOF
+ section: feature
+ bullet point:
+ paragraph: Bullet point with a paragraph that spans multiple lines.
+ paragraph: And has an empty line between lines too. With a line cuddling that.
+
+Multiple bullet points. With some entries being multiple lines.
+
+ $ hg debugparsereleasenotes - << EOF
+ > New Features
+ > ============
+ >
+ > * First bullet point. It has a single line.
+ >
+ > * Second bullet point.
+ > It consists of multiple lines.
+ >
+ > * Third bullet point. It has a single line.
+ > EOF
+ section: feature
+ bullet point:
+ paragraph: First bullet point. It has a single line.
+ paragraph: Second bullet point. It consists of multiple lines.
+ paragraph: Third bullet point. It has a single line.
+
+Bullet point without newline between items
+
+ $ hg debugparsereleasenotes - << EOF
+ > New Features
+ > ============
+ >
+ > * First bullet point
+ > * Second bullet point
+ > And it has multiple lines
+ > * Third bullet point
+ > * Fourth bullet point
+ > EOF
+ section: feature
+ bullet point:
+ paragraph: First bullet point
+ paragraph: Second bullet point And it has multiple lines
+ paragraph: Third bullet point
+ paragraph: Fourth bullet point
+
+Sub-section contents are read
+
+ $ hg debugparsereleasenotes - << EOF
+ > New Features
+ > ============
+ >
+ > First Feature
+ > -------------
+ >
+ > This is the first new feature that was implemented.
+ >
+ > And a second paragraph about it.
+ >
+ > Second Feature
+ > --------------
+ >
+ > This is the second new feature that was implemented.
+ >
+ > Paragraph two.
+ >
+ > Paragraph three.
+ > EOF
+ section: feature
+ subsection: First Feature
+ paragraph: This is the first new feature that was implemented.
+ paragraph: And a second paragraph about it.
+ subsection: Second Feature
+ paragraph: This is the second new feature that was implemented.
+ paragraph: Paragraph two.
+ paragraph: Paragraph three.
+
+Multiple sections are read
+
+ $ hg debugparsereleasenotes - << EOF
+ > New Features
+ > ============
+ >
+ > * Feature 1
+ > * Feature 2
+ >
+ > Bug Fixes
+ > =========
+ >
+ > * Fix 1
+ > * Fix 2
+ > EOF
+ section: feature
+ bullet point:
+ paragraph: Feature 1
+ paragraph: Feature 2
+ section: fix
+ bullet point:
+ paragraph: Fix 1
+ paragraph: Fix 2
+
+Mixed sub-sections and bullet list
+
+ $ hg debugparsereleasenotes - << EOF
+ > New Features
+ > ============
+ >
+ > Feature 1
+ > ---------
+ >
+ > Some words about the first feature.
+ >
+ > Feature 2
+ > ---------
+ >
+ > Some words about the second feature.
+ > That span multiple lines.
+ >
+ > Other Changes
+ > -------------
+ >
+ > * Bullet item 1
+ > * Bullet item 2
+ > EOF
+ section: feature
+ subsection: Feature 1
+ paragraph: Some words about the first feature.
+ subsection: Feature 2
+ paragraph: Some words about the second feature. That span multiple lines.
+ bullet point:
+ paragraph: Bullet item 1
+ paragraph: Bullet item 2
--- a/tests/test-relink.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-relink.t Tue Jun 20 16:33:46 2017 -0400
@@ -93,8 +93,8 @@
check hardlinks
- $ python arelinked.py repo/.hg/store/data/a.i clone/.hg/store/data/a.i
+ $ $PYTHON arelinked.py repo/.hg/store/data/a.i clone/.hg/store/data/a.i
repo/.hg/store/data/a.i == clone/.hg/store/data/a.i
- $ python arelinked.py repo/.hg/store/data/b.i clone/.hg/store/data/b.i
+ $ $PYTHON arelinked.py repo/.hg/store/data/b.i clone/.hg/store/data/b.i
repo/.hg/store/data/b.i != clone/.hg/store/data/b.i
--- a/tests/test-rename-merge2.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-rename-merge2.t Tue Jun 20 16:33:46 2017 -0400
@@ -89,7 +89,6 @@
preserving a for resolve of b
preserving rev for resolve of rev
starting 4 threads for background file closing (?)
- a: remote unchanged -> k
b: remote copied from a -> m (premerge)
picked tool 'python ../merge' for b (binary False symlink False changedelete False)
merging a and b to b
@@ -652,7 +651,6 @@
preserving b for resolve of b
preserving rev for resolve of rev
starting 4 threads for background file closing (?)
- a: remote unchanged -> k
b: both created -> m (premerge)
picked tool 'python ../merge' for b (binary False symlink False changedelete False)
merging b
--- a/tests/test-repair-strip.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-repair-strip.t Tue Jun 20 16:33:46 2017 -0400
@@ -21,7 +21,7 @@
> hg verify
> echo % journal contents
> if [ -f .hg/store/journal ]; then
- > cat .hg/store/journal | python $TESTTMP/dumpjournal.py
+ > cat .hg/store/journal | $PYTHON $TESTTMP/dumpjournal.py
> else
> echo "(no journal)"
> fi
--- a/tests/test-requires.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-requires.t Tue Jun 20 16:33:46 2017 -0400
@@ -5,7 +5,7 @@
$ hg commit -m test
$ rm .hg/requires
$ hg tip
- abort: index 00changelog.i unknown format 2!
+ abort: unknown version (2) in revlog 00changelog.i!
[255]
$ echo indoor-pool > .hg/requires
$ hg tip
@@ -37,7 +37,7 @@
> for name, module in extensions.extensions(ui):
> if __name__ == module.__name__:
> # support specific feature locally
- > supported |= set(['featuresetup-test'])
+ > supported |= {'featuresetup-test'}
> return
> def uisetup(ui):
> localrepo.localrepository.featuresetupfuncs.add(featuresetup)
--- a/tests/test-resolve.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-resolve.t Tue Jun 20 16:33:46 2017 -0400
@@ -85,9 +85,9 @@
$ cat > $TESTTMP/markdriver.py << EOF
> '''mark and unmark files as driver-resolved'''
- > from mercurial import cmdutil, merge, scmutil
+ > from mercurial import merge, registrar, scmutil
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> @command('markdriver',
> [('u', 'unmark', None, '')],
> 'FILE...')
--- a/tests/test-revert.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-revert.t Tue Jun 20 16:33:46 2017 -0400
@@ -495,7 +495,7 @@
check list of planned files
- $ python $TESTDIR/generate-working-copy-states.py filelist 2
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py filelist 2
content1_content1_content1-tracked
content1_content1_content1-untracked
content1_content1_content3-tracked
@@ -550,7 +550,7 @@
Generate base changeset
- $ python $TESTDIR/generate-working-copy-states.py state 2 1
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 2 1
$ hg addremove --similarity 0
adding content1_content1_content1-tracked
adding content1_content1_content1-untracked
@@ -597,7 +597,7 @@
(create a simple text version of the content)
- $ python ../dircontent.py > ../content-base.txt
+ $ $PYTHON ../dircontent.py > ../content-base.txt
$ cat ../content-base.txt
content1 content1_content1_content1-tracked
content1 content1_content1_content1-untracked
@@ -622,7 +622,7 @@
Create parent changeset
- $ python $TESTDIR/generate-working-copy-states.py state 2 2
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 2 2
$ hg addremove --similarity 0
removing content1_missing_content1-tracked
removing content1_missing_content1-untracked
@@ -661,7 +661,7 @@
(create a simple text version of the content)
- $ python ../dircontent.py > ../content-parent.txt
+ $ $PYTHON ../dircontent.py > ../content-parent.txt
$ cat ../content-parent.txt
content1 content1_content1_content1-tracked
content1 content1_content1_content1-untracked
@@ -686,7 +686,7 @@
Setup working directory
- $ python $TESTDIR/generate-working-copy-states.py state 2 wc
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 2 wc
$ hg addremove --similarity 0
adding content1_missing_content1-tracked
adding content1_missing_content1-untracked
@@ -754,7 +754,7 @@
(create a simple text version of the content)
- $ python ../dircontent.py > ../content-wc.txt
+ $ $PYTHON ../dircontent.py > ../content-wc.txt
$ cat ../content-wc.txt
content1 content1_content1_content1-tracked
content1 content1_content1_content1-untracked
@@ -818,7 +818,7 @@
The diff is filtered to include change only. The only difference should be
additional `.orig` backup file when applicable.
- $ python ../dircontent.py > ../content-parent-all.txt
+ $ $PYTHON ../dircontent.py > ../content-parent-all.txt
$ cd ..
$ diff -U 0 -- content-parent.txt content-parent-all.txt | grep _
+content3 content1_content1_content3-tracked.orig
@@ -875,7 +875,7 @@
The diff is filtered to include change only. The only difference should be
additional `.orig` backup file when applicable.
- $ python ../dircontent.py > ../content-base-all.txt
+ $ $PYTHON ../dircontent.py > ../content-base-all.txt
$ cd ..
$ diff -U 0 -- content-base.txt content-base-all.txt | grep _
+content3 content1_content1_content3-tracked.orig
@@ -902,7 +902,7 @@
revert all files individually and check the output
(output is expected to be different than in the --all case)
- $ for file in `python $TESTDIR/generate-working-copy-states.py filelist 2`; do
+ $ for file in `$PYTHON $TESTDIR/generate-working-copy-states.py filelist 2`; do
> echo '### revert for:' $file;
> hg revert $file;
> echo
@@ -979,7 +979,7 @@
check resulting directory against the --all run
(There should be no difference)
- $ python ../dircontent.py > ../content-parent-explicit.txt
+ $ $PYTHON ../dircontent.py > ../content-parent-explicit.txt
$ cd ..
$ diff -U 0 -- content-parent-all.txt content-parent-explicit.txt | grep _
[1]
@@ -995,7 +995,7 @@
revert all files individually and check the output
(output is expected to be different than in the --all case)
- $ for file in `python $TESTDIR/generate-working-copy-states.py filelist 2`; do
+ $ for file in `$PYTHON $TESTDIR/generate-working-copy-states.py filelist 2`; do
> echo '### revert for:' $file;
> hg revert $file --rev 'desc(base)';
> echo
@@ -1072,7 +1072,7 @@
check resulting directory against the --all run
(There should be no difference)
- $ python ../dircontent.py > ../content-base-explicit.txt
+ $ $PYTHON ../dircontent.py > ../content-base-explicit.txt
$ cd ..
$ diff -U 0 -- content-base-all.txt content-base-explicit.txt | grep _
[1]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-revlog-v2.t Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,62 @@
+A repo with unknown revlogv2 requirement string cannot be opened
+
+ $ hg init invalidreq
+ $ cd invalidreq
+ $ echo exp-revlogv2.unknown >> .hg/requires
+ $ hg log
+ abort: repository requires features unknown to this Mercurial: exp-revlogv2.unknown!
+ (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
+ [255]
+ $ cd ..
+
+Can create and open repo with revlog v2 requirement
+
+ $ cat >> $HGRCPATH << EOF
+ > [experimental]
+ > revlogv2 = enable-unstable-format-and-corrupt-my-data
+ > EOF
+
+ $ hg init empty-repo
+ $ cd empty-repo
+ $ cat .hg/requires
+ dotencode
+ exp-revlogv2.0
+ fncache
+ store
+
+ $ hg log
+
+Unknown flags to revlog are rejected
+
+ >>> with open('.hg/store/00changelog.i', 'wb') as fh:
+ ... fh.write('\x00\x04\xde\xad')
+
+ $ hg log
+ abort: unknown flags (0x04) in version 57005 revlog 00changelog.i!
+ [255]
+
+ $ cd ..
+
+Writing a simple revlog v2 works
+
+ $ hg init simple
+ $ cd simple
+ $ touch foo
+ $ hg -q commit -A -m initial
+
+ $ hg log
+ changeset: 0:96ee1d7354c4
+ tag: tip
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: initial
+
+Header written as expected (changelog always disables generaldelta)
+
+ $ f --hexdump --bytes 4 .hg/store/00changelog.i
+ .hg/store/00changelog.i:
+ 0000: 00 01 de ad |....|
+
+ $ f --hexdump --bytes 4 .hg/store/data/foo.i
+ .hg/store/data/foo.i:
+ 0000: 00 03 de ad |....|
--- a/tests/test-revlog.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-revlog.t Tue Jun 20 16:33:46 2017 -0400
@@ -1,3 +1,35 @@
+ $ hg init empty-repo
+ $ cd empty-repo
+
+Flags on revlog version 0 are rejected
+
+ >>> with open('.hg/store/00changelog.i', 'wb') as fh:
+ ... fh.write('\x00\x01\x00\x00')
+
+ $ hg log
+ abort: unknown flags (0x01) in version 0 revlog 00changelog.i!
+ [255]
+
+Unknown flags on revlog version 1 are rejected
+
+ >>> with open('.hg/store/00changelog.i', 'wb') as fh:
+ ... fh.write('\x00\x04\x00\x01')
+
+ $ hg log
+ abort: unknown flags (0x04) in version 1 revlog 00changelog.i!
+ [255]
+
+Unknown version is rejected
+
+ >>> with open('.hg/store/00changelog.i', 'wb') as fh:
+ ... fh.write('\x00\x00\x00\x02')
+
+ $ hg log
+ abort: unknown version (2) in revlog 00changelog.i!
+ [255]
+
+ $ cd ..
+
Test for CVE-2016-3630
$ hg init
@@ -12,4 +44,4 @@
0 0 19 -1 2 99e0332bd498 000000000000 000000000000
1 19 12 0 3 6674f57a23d8 99e0332bd498 000000000000
$ hg debugdata a.i 1 2>&1 | egrep 'Error:.*decoded'
- (mercurial.mpatch.)?mpatchError: patch cannot be decoded (re)
+ (mercurial\.\w+\.mpatch\.)?mpatchError: patch cannot be decoded (re)
--- a/tests/test-revset.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-revset.t Tue Jun 20 16:33:46 2017 -0400
@@ -37,14 +37,14 @@
$ cat <<EOF > debugrevlistspec.py
> from __future__ import absolute_import
> from mercurial import (
- > cmdutil,
> node as nodemod,
+ > registrar,
> revset,
> revsetlang,
> smartset,
> )
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> @command('debugrevlistspec',
> [('', 'optimize', None, 'print parsed tree after optimizing'),
> ('', 'bin', None, 'unhexlify arguments')])
@@ -157,7 +157,7 @@
('symbol', '0')
('symbol', '1'))
* set:
- <spanset+ 0:1>
+ <spanset+ 0:2>
0
1
$ try --optimize :
@@ -168,7 +168,7 @@
None
define)
* set:
- <spanset+ 0:9>
+ <spanset+ 0:10>
0
1
2
@@ -266,7 +266,7 @@
(rangepost
('symbol', '+a+b+c+'))
* set:
- <spanset+ 3:9>
+ <spanset+ 3:10>
3
4
5
@@ -278,7 +278,7 @@
(rangepre
('symbol', '+a+b+c+'))
* set:
- <spanset+ 0:3>
+ <spanset+ 0:4>
0
1
2
@@ -288,7 +288,7 @@
('symbol', '-a-b-c-')
('symbol', '+a+b+c+'))
* set:
- <spanset- 3:4>
+ <spanset- 3:5>
4
3
$ log '-a-b-c-:+a+b+c+'
@@ -413,7 +413,7 @@
hg: parse error: invalid \x escape
[255]
$ log 'date(tip)'
- abort: invalid date: 'tip'
+ hg: parse error: invalid date: 'tip'
[255]
$ log '0:date'
abort: unknown revision 'date'!
@@ -626,7 +626,7 @@
None
define)
* set:
- <spanset+ 0:9>
+ <spanset+ 0:10>
0
1
2
@@ -643,7 +643,7 @@
('symbol', '1')
define)
* set:
- <spanset+ 0:1>
+ <spanset+ 0:2>
0
1
$ try -p analyzed ':(1|2)'
@@ -656,7 +656,7 @@
define)
define)
* set:
- <spanset+ 0:2>
+ <spanset+ 0:3>
0
1
2
@@ -681,7 +681,7 @@
('symbol', '1'))
('symbol', '2'))
* set:
- <spanset+ 0:2>
+ <spanset+ 0:3>
0
1
2
@@ -702,7 +702,7 @@
(parentpost
('symbol', '9')))
* set:
- <spanset+ 8:9>
+ <spanset+ 8:10>
8
9
@@ -727,7 +727,7 @@
('symbol', '1'))
('symbol', '2')))
* set:
- <spanset+ 0:2>
+ <spanset+ 0:3>
0
1
2
@@ -742,7 +742,7 @@
('symbol', '4'))))
('symbol', '2'))
* set:
- <spanset+ 0:2>
+ <spanset+ 0:3>
0
1
2
@@ -770,7 +770,7 @@
(parentpost
('symbol', '9'))))))
* set:
- <spanset+ 4:9>
+ <spanset+ 4:10>
4
5
6
@@ -788,7 +788,7 @@
('symbol', '1'))
('symbol', '2'))
* set:
- <spanset+ 0:2>
+ <spanset+ 0:3>
0
1
2
@@ -803,7 +803,7 @@
('symbol', '1'))
('symbol', '2'))
* set:
- <spanset+ 0:2>
+ <spanset+ 0:3>
0
1
2
@@ -957,7 +957,7 @@
('string', '\x08issue\\d+'))
* set:
<filteredset
- <fullreposet+ 0:9>,
+ <fullreposet+ 0:10>,
<grep '\x08issue\\d+'>>
$ try 'grep(r"\bissue\d+")'
(func
@@ -965,7 +965,7 @@
('string', '\\bissue\\d+'))
* set:
<filteredset
- <fullreposet+ 0:9>,
+ <fullreposet+ 0:10>,
<grep '\\bissue\\d+'>>
6
$ try 'grep(r"\")'
@@ -986,6 +986,9 @@
$ log 'keyword(issue)'
6
$ log 'keyword("test a")'
+
+Test first (=limit) and last
+
$ log 'limit(head(), 1)'
0
$ log 'limit(author("re:bob|test"), 3, 5)'
@@ -998,6 +1001,163 @@
$ log 'limit(all(), 1, -1)'
hg: parse error: negative offset
[255]
+ $ log 'limit(all(), -1)'
+ hg: parse error: negative number to select
+ [255]
+ $ log 'limit(all(), 0)'
+
+ $ log 'last(all(), -1)'
+ hg: parse error: negative number to select
+ [255]
+ $ log 'last(all(), 0)'
+ $ log 'last(all(), 1)'
+ 9
+ $ log 'last(all(), 2)'
+ 8
+ 9
+
+Test smartset.slice() by first/last()
+
+ (using unoptimized set, filteredset as example)
+
+ $ hg debugrevspec --no-show-revs -s '0:7 & branch("re:")'
+ * set:
+ <filteredset
+ <spanset+ 0:8>,
+ <branch 're:'>>
+ $ log 'limit(0:7 & branch("re:"), 3, 4)'
+ 4
+ 5
+ 6
+ $ log 'limit(7:0 & branch("re:"), 3, 4)'
+ 3
+ 2
+ 1
+ $ log 'last(0:7 & branch("re:"), 2)'
+ 6
+ 7
+
+ (using baseset)
+
+ $ hg debugrevspec --no-show-revs -s 0+1+2+3+4+5+6+7
+ * set:
+ <baseset [0, 1, 2, 3, 4, 5, 6, 7]>
+ $ hg debugrevspec --no-show-revs -s 0::7
+ * set:
+ <baseset+ [0, 1, 2, 3, 4, 5, 6, 7]>
+ $ log 'limit(0+1+2+3+4+5+6+7, 3, 4)'
+ 4
+ 5
+ 6
+ $ log 'limit(sort(0::7, rev), 3, 4)'
+ 4
+ 5
+ 6
+ $ log 'limit(sort(0::7, -rev), 3, 4)'
+ 3
+ 2
+ 1
+ $ log 'last(sort(0::7, rev), 2)'
+ 6
+ 7
+ $ hg debugrevspec -s 'limit(sort(0::7, rev), 3, 6)'
+ * set:
+ <baseset+ [6, 7]>
+ 6
+ 7
+ $ hg debugrevspec -s 'limit(sort(0::7, rev), 3, 9)'
+ * set:
+ <baseset+ []>
+ $ hg debugrevspec -s 'limit(sort(0::7, -rev), 3, 6)'
+ * set:
+ <baseset- [0, 1]>
+ 1
+ 0
+ $ hg debugrevspec -s 'limit(sort(0::7, -rev), 3, 9)'
+ * set:
+ <baseset- []>
+ $ hg debugrevspec -s 'limit(0::7, 0)'
+ * set:
+ <baseset+ []>
+
+ (using spanset)
+
+ $ hg debugrevspec --no-show-revs -s 0:7
+ * set:
+ <spanset+ 0:8>
+ $ log 'limit(0:7, 3, 4)'
+ 4
+ 5
+ 6
+ $ log 'limit(7:0, 3, 4)'
+ 3
+ 2
+ 1
+ $ log 'limit(0:7, 3, 6)'
+ 6
+ 7
+ $ log 'limit(7:0, 3, 6)'
+ 1
+ 0
+ $ log 'last(0:7, 2)'
+ 6
+ 7
+ $ hg debugrevspec -s 'limit(0:7, 3, 6)'
+ * set:
+ <spanset+ 6:8>
+ 6
+ 7
+ $ hg debugrevspec -s 'limit(0:7, 3, 9)'
+ * set:
+ <spanset+ 8:8>
+ $ hg debugrevspec -s 'limit(7:0, 3, 6)'
+ * set:
+ <spanset- 0:2>
+ 1
+ 0
+ $ hg debugrevspec -s 'limit(7:0, 3, 9)'
+ * set:
+ <spanset- 0:0>
+ $ hg debugrevspec -s 'limit(0:7, 0)'
+ * set:
+ <spanset+ 0:0>
+
+Test order of first/last revisions
+
+ $ hg debugrevspec -s 'first(4:0, 3) & 3:'
+ * set:
+ <filteredset
+ <spanset- 2:5>,
+ <spanset+ 3:10>>
+ 4
+ 3
+
+ $ hg debugrevspec -s '3: & first(4:0, 3)'
+ * set:
+ <filteredset
+ <spanset+ 3:10>,
+ <spanset- 2:5>>
+ 3
+ 4
+
+ $ hg debugrevspec -s 'last(4:0, 3) & :1'
+ * set:
+ <filteredset
+ <spanset- 0:3>,
+ <spanset+ 0:2>>
+ 1
+ 0
+
+ $ hg debugrevspec -s ':1 & last(4:0, 3)'
+ * set:
+ <filteredset
+ <spanset+ 0:2>,
+ <spanset+ 0:3>>
+ 0
+ 1
+
+Test matching
+
$ log 'matching(6)'
6
$ log 'matching(6:7, "phase parents user date branch summary files description substate")'
@@ -1210,10 +1370,10 @@
$ log 'reverse(null:)' | tail -2
0
-1
+ $ log 'first(null:)'
+ -1
+ $ log 'min(null:)'
BROKEN: should be '-1'
- $ log 'first(null:)'
-BROKEN: should be '-1'
- $ log 'min(null:)'
$ log 'tip:null and all()' | tail -2
1
0
@@ -1221,6 +1381,42 @@
Test working-directory revision
$ hg debugrevspec 'wdir()'
2147483647
+ $ hg debugrevspec 'wdir()^'
+ 9
+ $ hg up 7
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg debugrevspec 'wdir()^'
+ 7
+ $ hg debugrevspec 'wdir()^0'
+ 2147483647
+ $ hg debugrevspec 'wdir()~3'
+ 5
+ $ hg debugrevspec 'ancestors(wdir())'
+ 0
+ 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ 7
+ 2147483647
+ $ hg debugrevspec 'wdir()~0'
+ 2147483647
+ $ hg debugrevspec 'p1(wdir())'
+ 7
+ $ hg debugrevspec 'p2(wdir())'
+ $ hg debugrevspec 'parents(wdir())'
+ 7
+ $ hg debugrevspec 'wdir()^1'
+ 7
+ $ hg debugrevspec 'wdir()^2'
+ $ hg debugrevspec 'wdir()^3'
+ hg: parse error: ^ expects a number 0, 1, or 2
+ [255]
+For tests consistency
+ $ hg up 9
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg debugrevspec 'tip or wdir()'
9
2147483647
@@ -1239,9 +1435,103 @@
9
$ log '(all() + wdir()) & max(. + wdir())'
2147483647
- $ log '(all() + wdir()) & first(wdir() + .)'
+ $ log 'first(wdir() + .)'
+ 2147483647
+ $ log 'last(. + wdir())'
+ 2147483647
+
+Test working-directory integer revision and node id
+(BUG: '0:wdir()' is still needed to populate wdir revision)
+
+ $ hg debugrevspec '0:wdir() & 2147483647'
+ 2147483647
+ $ hg debugrevspec '0:wdir() & rev(2147483647)'
+ 2147483647
+ $ hg debugrevspec '0:wdir() & ffffffffffffffffffffffffffffffffffffffff'
+ 2147483647
+ $ hg debugrevspec '0:wdir() & ffffffffffff'
+ 2147483647
+ $ hg debugrevspec '0:wdir() & id(ffffffffffffffffffffffffffffffffffffffff)'
+ 2147483647
+ $ hg debugrevspec '0:wdir() & id(ffffffffffff)'
2147483647
- $ log '(all() + wdir()) & last(. + wdir())'
+
+ $ cd ..
+
+Test short 'ff...' hash collision
+(BUG: '0:wdir()' is still needed to populate wdir revision)
+
+ $ hg init wdir-hashcollision
+ $ cd wdir-hashcollision
+ $ cat <<EOF >> .hg/hgrc
+ > [experimental]
+ > evolution = createmarkers
+ > EOF
+ $ echo 0 > a
+ $ hg ci -qAm 0
+ $ for i in 2463 2961 6726 78127; do
+ > hg up -q 0
+ > echo $i > a
+ > hg ci -qm $i
+ > done
+ $ hg up -q null
+ $ hg log -r '0:wdir()' -T '{rev}:{node} {shortest(node, 3)}\n'
+ 0:b4e73ffab476aa0ee32ed81ca51e07169844bc6a b4e
+ 1:fffbae3886c8fbb2114296380d276fd37715d571 fffba
+ 2:fffb6093b00943f91034b9bdad069402c834e572 fffb6
+ 3:fff48a9b9de34a4d64120c29548214c67980ade3 fff4
+ 4:ffff85cff0ff78504fcdc3c0bc10de0c65379249 ffff8
+ 2147483647:ffffffffffffffffffffffffffffffffffffffff fffff
+ $ hg debugobsolete fffbae3886c8fbb2114296380d276fd37715d571
+
+ $ hg debugrevspec '0:wdir() & fff'
+ abort: 00changelog.i@fff: ambiguous identifier!
+ [255]
+ $ hg debugrevspec '0:wdir() & ffff'
+ abort: 00changelog.i@ffff: ambiguous identifier!
+ [255]
+ $ hg debugrevspec '0:wdir() & fffb'
+ abort: 00changelog.i@fffb: ambiguous identifier!
+ [255]
+BROKEN should be '2' (node lookup uses unfiltered repo since dc25ed84bee8)
+ $ hg debugrevspec '0:wdir() & id(fffb)'
+ 2
+ $ hg debugrevspec '0:wdir() & ffff8'
+ 4
+ $ hg debugrevspec '0:wdir() & fffff'
+ 2147483647
+
+ $ cd ..
+
+Test branch() with wdir()
+
+ $ cd repo
+
+ $ log '0:wdir() & branch("literal:é")'
+ 8
+ 9
+ 2147483647
+ $ log '0:wdir() & branch("re:é")'
+ 8
+ 9
+ 2147483647
+ $ log '0:wdir() & branch("re:^a")'
+ 0
+ 2
+ $ log '0:wdir() & branch(8)'
+ 8
+ 9
+ 2147483647
+
+branch(wdir()) returns all revisions belonging to the working branch. The wdir
+itself isn't returned unless it is explicitly populated.
+
+ $ log 'branch(wdir())'
+ 8
+ 9
+ $ log '0:wdir() & branch(wdir())'
+ 8
+ 9
2147483647
$ log 'outgoing()'
@@ -1358,10 +1648,10 @@
* set:
<filteredset
<filteredset
- <spanset- 0:3>,
- <spanset+ 0:3>>,
+ <spanset- 0:4>,
+ <spanset+ 0:4>>,
<not
- <spanset+ 1:2>>>
+ <spanset+ 1:3>>>
3
0
@@ -1392,7 +1682,7 @@
define)
* set:
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<baseset [0, 1, 2]>>
2
1
@@ -1429,10 +1719,10 @@
define)
* set:
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<addset
<baseset [2]>,
- <spanset+ 0:1>>>
+ <spanset+ 0:2>>>
2
1
0
@@ -1460,7 +1750,7 @@
define)
* set:
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<baseset+ [0, 1, 2]>>
2
1
@@ -1488,7 +1778,7 @@
* set:
<filteredset
<baseset [0, 2, 1]>,
- <spanset- 0:2>>
+ <spanset- 0:3>>
0
2
1
@@ -1516,7 +1806,7 @@
define)
* set:
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<baseset [0, 1, 2]>>
2
1
@@ -1564,7 +1854,7 @@
define)
* set:
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<not
<baseset [0, 1]>>>
2
@@ -1589,7 +1879,7 @@
define)
* set:
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<not
<baseset [0, 1]>>>
2
@@ -1640,7 +1930,7 @@
define)
* set:
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<baseset [0, 1, 2]>>
2
1
@@ -1674,8 +1964,8 @@
define)
* set:
<filteredset
- <spanset+ 0:2>,
- <spanset+ 0:9>>
+ <spanset+ 0:3>,
+ <spanset+ 0:10>>
0
1
2
@@ -1713,8 +2003,8 @@
define)
* set:
<filteredset
- <spanset+ 0:2>,
- <spanset+ 0:9>>
+ <spanset+ 0:3>,
+ <spanset+ 0:10>>
0
1
2
@@ -1757,10 +2047,9 @@
follow)
define)
* set:
- <baseset
- <limit n=1, offset=0,
- <spanset- 0:2>,
- <baseset [1, 0, 2]>>>
+ <filteredset
+ <baseset [1]>,
+ <spanset- 0:3>>
1
$ try --optimize '2:0 & not last(0 + 2 + 1)'
@@ -1792,12 +2081,9 @@
define)
* set:
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<not
- <baseset
- <last n=1,
- <fullreposet+ 0:9>,
- <baseset [1, 2, 0]>>>>>
+ <baseset [1]>>>
2
0
@@ -1840,7 +2126,7 @@
define)
* set:
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<baseset [1]>>
1
@@ -1941,11 +2227,11 @@
define)
* set:
<filteredset
- <spanset+ 0:2>,
+ <spanset+ 0:3>,
<addset
<baseset [2]>,
<filteredset
- <fullreposet+ 0:9>,
+ <fullreposet+ 0:10>,
<contains 'a'>>>>
0
1
@@ -1973,7 +2259,7 @@
* set:
<addset
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<contains 'a'>>,
<baseset [2]>>
1
@@ -2398,7 +2684,7 @@
* set:
<addset
<baseset [0, 1]>,
- <spanset+ 2:3>>
+ <spanset+ 2:4>>
0
1
2
@@ -2436,10 +2722,10 @@
* set:
<addset
<addset
- <spanset+ 0:1>,
+ <spanset+ 0:2>,
<baseset [2]>>,
<addset
- <spanset+ 3:4>,
+ <spanset+ 3:5>,
<baseset [5, 6]>>>
0
1
@@ -2554,13 +2840,13 @@
* set:
<addset
<addset
- <spanset+ 0:1>,
- <spanset+ 1:2>>,
+ <spanset+ 0:2>,
+ <spanset+ 1:3>>,
<addset
- <spanset+ 2:3>,
+ <spanset+ 2:4>,
<addset
- <spanset+ 3:4>,
- <spanset+ 4:5>>>>
+ <spanset+ 3:5>,
+ <spanset+ 4:6>>>>
0
1
2
@@ -2694,6 +2980,64 @@
hg: parse error: missing argument
[255]
+optimization to only() works only if ancestors() takes only one argument
+
+ $ hg debugrevspec -p optimized 'ancestors(6) - ancestors(4, 1)'
+ * optimized:
+ (difference
+ (func
+ ('symbol', 'ancestors')
+ ('symbol', '6')
+ define)
+ (func
+ ('symbol', 'ancestors')
+ (list
+ ('symbol', '4')
+ ('symbol', '1'))
+ any)
+ define)
+ hg: parse error: ancestors takes at most 1 positional arguments
+ [255]
+ $ hg debugrevspec -p optimized 'ancestors(6, 1) - ancestors(4)'
+ * optimized:
+ (difference
+ (func
+ ('symbol', 'ancestors')
+ (list
+ ('symbol', '6')
+ ('symbol', '1'))
+ define)
+ (func
+ ('symbol', 'ancestors')
+ ('symbol', '4')
+ any)
+ define)
+ hg: parse error: ancestors takes at most 1 positional arguments
+ [255]
+
+optimization disabled if keyword arguments passed (because we're too lazy
+to support it)
+
+ $ hg debugrevspec -p optimized 'ancestors(set=6) - ancestors(set=4)'
+ * optimized:
+ (difference
+ (func
+ ('symbol', 'ancestors')
+ (keyvalue
+ ('symbol', 'set')
+ ('symbol', '6'))
+ define)
+ (func
+ ('symbol', 'ancestors')
+ (keyvalue
+ ('symbol', 'set')
+ ('symbol', '4'))
+ any)
+ define)
+ 3
+ 5
+ 6
+
invalid function call should not be optimized to only()
$ log '"ancestors"(6) and not ancestors(4)'
@@ -2845,6 +3189,16 @@
$ log 'merge()^^^'
1
+ $ hg debugrevspec -s '(merge() | 0)~-1'
+ * set:
+ <baseset+ [1, 7]>
+ 1
+ 7
+ $ log 'merge()~-1'
+ 7
+ $ log 'tip~-1'
+ $ log '(tip | merge())~-1'
+ 7
$ log 'merge()~0'
6
$ log 'merge()~1'
@@ -2865,6 +3219,10 @@
hg: parse error: ^ expects a number 0, 1, or 2
[255]
+ $ log 'branchpoint()~-1'
+ abort: revision in set has more than one child!
+ [255]
+
Bogus function gets suggestions
$ log 'add()'
hg: parse error: unknown identifier: add
@@ -2965,7 +3323,7 @@
None)
* set:
<filteredset
- <fullreposet+ 0:9>,
+ <fullreposet+ 0:10>,
<merge>>
6
@@ -2986,7 +3344,7 @@
None)
* set:
<filteredset
- <fullreposet+ 0:9>,
+ <fullreposet+ 0:10>,
<merge>>
6
@@ -3044,7 +3402,7 @@
* set:
<addset+
<filteredset
- <fullreposet+ 0:9>,
+ <fullreposet+ 0:10>,
<merge>>,
<generatorset+>>
6
@@ -3107,8 +3465,8 @@
* set:
<baseset
<max
- <fullreposet+ 0:9>,
- <spanset+ 2:5>>>
+ <fullreposet+ 0:10>,
+ <spanset+ 2:6>>>
5
test chained `or` operations are flattened at parsing phase
@@ -3141,10 +3499,10 @@
('symbol', '3'))))
* set:
<addset
- <spanset+ 0:1>,
+ <spanset+ 0:2>,
<addset
- <spanset+ 1:2>,
- <spanset+ 2:3>>>
+ <spanset+ 1:3>,
+ <spanset+ 2:4>>>
0
1
2
@@ -3189,7 +3547,7 @@
* set:
<filteredset
<baseset [0]>,
- <spanset+ 0:9>>
+ <spanset+ 0:10>>
0
test unknown reference:
@@ -3238,7 +3596,7 @@
<addset
<baseset [9]>,
<filteredset
- <fullreposet+ 0:9>,
+ <fullreposet+ 0:10>,
<desc '$1'>>>
9
@@ -3409,10 +3767,7 @@
('symbol', '2')))
* set:
<filteredset
- <baseset
- <limit n=2, offset=0,
- <fullreposet+ 0:9>,
- <baseset [1, 2, 3]>>>,
+ <baseset [1, 2]>,
<not
<baseset [2]>>>
1
@@ -3430,7 +3785,7 @@
<filteredset
<baseset
<max
- <fullreposet+ 0:9>,
+ <fullreposet+ 0:10>,
<baseset [1, 2]>>>,
<not
<baseset [2]>>>
@@ -3448,7 +3803,7 @@
<filteredset
<baseset
<min
- <fullreposet+ 0:9>,
+ <fullreposet+ 0:10>,
<baseset [1, 2]>>>,
<not
<baseset [1]>>>
@@ -3466,10 +3821,7 @@
('symbol', '2')))
* set:
<filteredset
- <baseset
- <last n=1,
- <fullreposet+ 0:9>,
- <baseset [2, 1]>>>,
+ <baseset [2]>,
<not
<baseset [2]>>>
--- a/tests/test-run-tests.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-run-tests.t Tue Jun 20 16:33:46 2017 -0400
@@ -9,7 +9,7 @@
$ run-tests.py $HGTEST_RUN_TESTS_PURE -l
- # Ran 0 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 0 tests, 0 skipped, 0 failed.
Define a helper to avoid the install step
=============
@@ -25,7 +25,7 @@
$ run-tests.py --with-hg=./hg
warning: --with-hg should specify an hg script
- # Ran 0 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 0 tests, 0 skipped, 0 failed.
$ rm hg
#endif
@@ -58,7 +58,7 @@
$ touch test-empty.t
$ rt
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
$ rm test-empty.t
a succesful test
@@ -91,7 +91,7 @@
$ rt
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
failing test
==================
@@ -115,7 +115,7 @@
ERROR: test-failure.t output changed
!
Failed test-failure.t: output changed
- # Ran 1 tests, 0 skipped, 0 warned, 1 failed.
+ # Ran 1 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -162,10 +162,47 @@
!
Failed test-failure.t: output changed
Failed test-failure-unicode.t: output changed
- # Ran 3 tests, 0 skipped, 0 warned, 2 failed.
+ # Ran 3 tests, 0 skipped, 2 failed.
python hash seed: * (glob)
[1]
+test --outputdir
+ $ mkdir output
+ $ rt --outputdir output
+
+ --- $TESTTMP/test-failure.t
+ +++ $TESTTMP/output/test-failure.t.err
+ @@ -1,5 +1,5 @@
+ $ echo babar
+ - rataxes
+ + babar
+ This is a noop statement so that
+ this test is still more bytes than success.
+ pad pad pad pad............................................................
+
+ ERROR: test-failure.t output changed
+ !.
+ --- $TESTTMP/test-failure-unicode.t
+ +++ $TESTTMP/output/test-failure-unicode.t.err
+ @@ -1,2 +1,2 @@
+ $ echo babar\xce\xb1 (esc)
+ - l\xce\xb5\xce\xb5t (esc)
+ + babar\xce\xb1 (esc)
+
+ ERROR: test-failure-unicode.t output changed
+ !
+ Failed test-failure.t: output changed
+ Failed test-failure-unicode.t: output changed
+ # Ran 3 tests, 0 skipped, 2 failed.
+ python hash seed: * (glob)
+ [1]
+ $ ls -a output
+ .
+ ..
+ .testtimes
+ test-failure-unicode.t.err
+ test-failure.t.err
+
test --xunit support
$ rt --xunit=xunit.xml
@@ -192,7 +229,7 @@
!
Failed test-failure.t: output changed
Failed test-failure-unicode.t: output changed
- # Ran 3 tests, 0 skipped, 0 warned, 2 failed.
+ # Ran 3 tests, 0 skipped, 2 failed.
python hash seed: * (glob)
[1]
$ cat xunit.xml
@@ -200,14 +237,17 @@
<testsuite errors="0" failures="2" name="run-tests" skipped="0" tests="3">
<testcase name="test-success.t" time="*"/> (glob)
<testcase name="test-failure-unicode.t" time="*"> (glob)
+ <failure message="output changed" type="output-mismatch">
<![CDATA[--- $TESTTMP/test-failure-unicode.t
+++ $TESTTMP/test-failure-unicode.t.err
@@ -1,2 +1,2 @@
$ echo babar\xce\xb1 (esc)
- l\xce\xb5\xce\xb5t (esc)
+ babar\xce\xb1 (esc)
- ]]> </testcase>
+ ]]> </failure>
+ </testcase>
<testcase name="test-failure.t" time="*"> (glob)
+ <failure message="output changed" type="output-mismatch">
<![CDATA[--- $TESTTMP/test-failure.t
+++ $TESTTMP/test-failure.t.err
@@ -1,5 +1,5 @@
@@ -217,13 +257,68 @@
This is a noop statement so that
this test is still more bytes than success.
pad pad pad pad............................................................
- ]]> </testcase>
+ ]]> </failure>
+ </testcase>
</testsuite>
$ cat .testtimes
test-failure-unicode.t * (glob)
test-failure.t * (glob)
test-success.t * (glob)
+
+ $ rt --list-tests
+ test-failure-unicode.t
+ test-failure.t
+ test-success.t
+
+ $ rt --list-tests --json
+ test-failure-unicode.t
+ test-failure.t
+ test-success.t
+ $ cat report.json
+ testreport ={
+ "test-failure-unicode.t": {
+ "result": "success"
+ },
+ "test-failure.t": {
+ "result": "success"
+ },
+ "test-success.t": {
+ "result": "success"
+ }
+ } (no-eol)
+
+ $ rt --list-tests --xunit=xunit.xml
+ test-failure-unicode.t
+ test-failure.t
+ test-success.t
+ $ cat xunit.xml
+ <?xml version="1.0" encoding="utf-8"?>
+ <testsuite errors="0" failures="0" name="run-tests" skipped="0" tests="0">
+ <testcase name="test-failure-unicode.t"/>
+ <testcase name="test-failure.t"/>
+ <testcase name="test-success.t"/>
+ </testsuite>
+
+ $ rt --list-tests test-failure* --json --xunit=xunit.xml --outputdir output
+ test-failure-unicode.t
+ test-failure.t
+ $ cat output/report.json
+ testreport ={
+ "test-failure-unicode.t": {
+ "result": "success"
+ },
+ "test-failure.t": {
+ "result": "success"
+ }
+ } (no-eol)
+ $ cat xunit.xml
+ <?xml version="1.0" encoding="utf-8"?>
+ <testsuite errors="0" failures="0" name="run-tests" skipped="0" tests="0">
+ <testcase name="test-failure-unicode.t"/>
+ <testcase name="test-failure.t"/>
+ </testsuite>
+
$ rm test-failure-unicode.t
test for --retest
@@ -244,7 +339,30 @@
ERROR: test-failure.t output changed
!
Failed test-failure.t: output changed
- # Ran 2 tests, 1 skipped, 0 warned, 1 failed.
+ # Ran 2 tests, 1 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
+
+--retest works with --outputdir
+ $ rm -r output
+ $ mkdir output
+ $ mv test-failure.t.err output
+ $ rt --retest --outputdir output
+
+ --- $TESTTMP/test-failure.t
+ +++ $TESTTMP/output/test-failure.t.err
+ @@ -1,5 +1,5 @@
+ $ echo babar
+ - rataxes
+ + babar
+ This is a noop statement so that
+ this test is still more bytes than success.
+ pad pad pad pad............................................................
+
+ ERROR: test-failure.t output changed
+ !
+ Failed test-failure.t: output changed
+ # Ran 2 tests, 1 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -255,12 +373,12 @@
$ rt test-success.t
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
success w/ keyword
$ rt -k xyzzy
.
- # Ran 2 tests, 1 skipped, 0 warned, 0 failed.
+ # Ran 2 tests, 1 skipped, 0 failed.
failed
@@ -279,7 +397,7 @@
ERROR: test-failure.t output changed
!
Failed test-failure.t: output changed
- # Ran 1 tests, 0 skipped, 0 warned, 1 failed.
+ # Ran 1 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -299,7 +417,7 @@
ERROR: test-failure.t output changed
!
Failed test-failure.t: output changed
- # Ran 2 tests, 1 skipped, 0 warned, 1 failed.
+ # Ran 2 tests, 1 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -314,7 +432,7 @@
ERROR: test-serve-fail.t output changed
!
Failed test-serve-fail.t: server failed to start (HGPORT=*) (glob)
- # Ran 1 tests, 0 skipped, 0 warned, 1 failed.
+ # Ran 1 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
$ rm test-serve-fail.t
@@ -330,7 +448,7 @@
> EOF
$ rt test-serve-inuse.t
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
$ rm test-serve-inuse.t
$ killdaemons.py $DAEMON_PIDS
$ rm $DAEMON_PIDS
@@ -368,7 +486,7 @@
+ echo *SALT* 22 0 (glob)
*SALT* 22 0 (glob)
.
- # Ran 2 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 2 tests, 0 skipped, 0 failed.
Parallel runs
==============
@@ -380,7 +498,7 @@
!!
Failed test-failure*.t: output changed (glob)
Failed test-failure*.t: output changed (glob)
- # Ran 2 tests, 0 skipped, 0 warned, 2 failed.
+ # Ran 2 tests, 0 skipped, 2 failed.
python hash seed: * (glob)
[1]
@@ -402,7 +520,7 @@
Failed test-failure*.t: output changed (glob)
Failed test-nothing.t: output changed
- # Ran 2 tests, 0 skipped, 0 warned, 2 failed.
+ # Ran 2 tests, 0 skipped, 2 failed.
python hash seed: * (glob)
[1]
@@ -434,7 +552,7 @@
ERROR: test-failure.t output changed
!.
Failed test-failure.t: output changed
- # Ran 2 tests, 0 skipped, 0 warned, 1 failed.
+ # Ran 2 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -458,7 +576,7 @@
ERROR: test-failure.t output changed
!.
Failed test-failure.t: output changed
- # Ran 2 tests, 0 skipped, 0 warned, 1 failed.
+ # Ran 2 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -470,7 +588,7 @@
ERROR: test-failure.t output changed
!.
Failed test-failure.t: output changed
- # Ran 2 tests, 0 skipped, 0 warned, 1 failed.
+ # Ran 2 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -503,7 +621,7 @@
saved backup bundle to $TESTTMP/foo.hg* (glob)
$ echo 'saved backup bundle to $TESTTMP/foo.hg'
Accept this change? [n] ..
- # Ran 2 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 2 tests, 0 skipped, 0 failed.
$ sed -e 's,(glob)$,&<,g' test-failure.t
$ echo babar
@@ -532,7 +650,7 @@
$ rt --nodiff
!.
Failed test-failure.t: output changed
- # Ran 2 tests, 0 skipped, 0 warned, 1 failed.
+ # Ran 2 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -542,7 +660,7 @@
Keeping testtmp dir: $TESTTMP/keep/child1/test-success.t (glob)
Keeping threadtmp dir: $TESTTMP/keep/child1 (glob)
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
timeouts
========
@@ -555,16 +673,16 @@
> cat test-timeout.t >> test-slow-timeout.t
$ rt --timeout=1 --slowtimeout=3 test-timeout.t test-slow-timeout.t
st
- Skipped test-slow-timeout.t: missing feature: allow slow tests
+ Skipped test-slow-timeout.t: missing feature: allow slow tests (use --allow-slow-tests)
Failed test-timeout.t: timed out
- # Ran 1 tests, 1 skipped, 0 warned, 1 failed.
+ # Ran 1 tests, 1 skipped, 1 failed.
python hash seed: * (glob)
[1]
$ rt --timeout=1 --slowtimeout=3 \
> test-timeout.t test-slow-timeout.t --allow-slow-tests
.t
Failed test-timeout.t: timed out
- # Ran 2 tests, 0 skipped, 0 warned, 1 failed.
+ # Ran 2 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
$ rm test-timeout.t test-slow-timeout.t
@@ -574,7 +692,7 @@
$ rt test-success.t --time
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
# Producing time report
start end cuser csys real Test
\s*[\d\.]{5} \s*[\d\.]{5} \s*[\d\.]{5} \s*[\d\.]{5} \s*[\d\.]{5} test-success.t (re)
@@ -584,7 +702,7 @@
$ rt test-success.t --time --jobs 2
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
# Producing time report
start end cuser csys real Test
\s*[\d\.]{5} \s*[\d\.]{5} \s*[\d\.]{5} \s*[\d\.]{5} \s*[\d\.]{5} test-success.t (re)
@@ -599,25 +717,29 @@
!.s
Skipped test-skip.t: missing feature: nail clipper
Failed test-failure.t: output changed
- # Ran 2 tests, 1 skipped, 0 warned, 1 failed.
+ # Ran 2 tests, 1 skipped, 1 failed.
python hash seed: * (glob)
[1]
$ rt --keyword xyzzy
.s
Skipped test-skip.t: missing feature: nail clipper
- # Ran 2 tests, 2 skipped, 0 warned, 0 failed.
+ # Ran 2 tests, 2 skipped, 0 failed.
Skips with xml
$ rt --keyword xyzzy \
> --xunit=xunit.xml
.s
Skipped test-skip.t: missing feature: nail clipper
- # Ran 2 tests, 2 skipped, 0 warned, 0 failed.
+ # Ran 2 tests, 2 skipped, 0 failed.
$ cat xunit.xml
<?xml version="1.0" encoding="utf-8"?>
<testsuite errors="0" failures="0" name="run-tests" skipped="2" tests="2">
<testcase name="test-success.t" time="*"/> (glob)
+ <testcase name="test-skip.t">
+ <skipped>
+ <![CDATA[missing feature: nail clipper]]> </skipped>
+ </testcase>
</testsuite>
Missing skips or blacklisted skips don't count as executed:
@@ -627,7 +749,7 @@
ss
Skipped test-bogus.t: Doesn't exist
Skipped test-failure.t: blacklisted
- # Ran 0 tests, 2 skipped, 0 warned, 0 failed.
+ # Ran 0 tests, 2 skipped, 0 failed.
$ cat report.json
testreport ={
"test-bogus.t": {
@@ -657,7 +779,7 @@
!
Skipped test-bogus.t: Doesn't exist
Failed test-failure.t: output changed
- # Ran 1 tests, 1 skipped, 0 warned, 1 failed.
+ # Ran 1 tests, 1 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -680,7 +802,7 @@
!.s
Skipped test-skip.t: missing feature: nail clipper
Failed test-failure.t: output changed
- # Ran 2 tests, 1 skipped, 0 warned, 1 failed.
+ # Ran 2 tests, 1 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -714,6 +836,68 @@
"time": "\s*[\d\.]{4,5}" (re)
}
} (no-eol)
+--json with --outputdir
+
+ $ rm report.json
+ $ rm -r output
+ $ mkdir output
+ $ rt --json --outputdir output
+
+ --- $TESTTMP/test-failure.t
+ +++ $TESTTMP/output/test-failure.t.err
+ @@ -1,5 +1,5 @@
+ $ echo babar
+ - rataxes
+ + babar
+ This is a noop statement so that
+ this test is still more bytes than success.
+ pad pad pad pad............................................................
+
+ ERROR: test-failure.t output changed
+ !.s
+ Skipped test-skip.t: missing feature: nail clipper
+ Failed test-failure.t: output changed
+ # Ran 2 tests, 1 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
+ $ f report.json
+ report.json: file not found
+ $ cat output/report.json
+ testreport ={
+ "test-failure.t": [\{] (re)
+ "csys": "\s*[\d\.]{4,5}", ? (re)
+ "cuser": "\s*[\d\.]{4,5}", ? (re)
+ "diff": "---.+\+\+\+.+", ? (re)
+ "end": "\s*[\d\.]{4,5}", ? (re)
+ "result": "failure", ? (re)
+ "start": "\s*[\d\.]{4,5}", ? (re)
+ "time": "\s*[\d\.]{4,5}" (re)
+ }, ? (re)
+ "test-skip.t": {
+ "csys": "\s*[\d\.]{4,5}", ? (re)
+ "cuser": "\s*[\d\.]{4,5}", ? (re)
+ "diff": "", ? (re)
+ "end": "\s*[\d\.]{4,5}", ? (re)
+ "result": "skip", ? (re)
+ "start": "\s*[\d\.]{4,5}", ? (re)
+ "time": "\s*[\d\.]{4,5}" (re)
+ }, ? (re)
+ "test-success.t": [\{] (re)
+ "csys": "\s*[\d\.]{4,5}", ? (re)
+ "cuser": "\s*[\d\.]{4,5}", ? (re)
+ "diff": "", ? (re)
+ "end": "\s*[\d\.]{4,5}", ? (re)
+ "result": "success", ? (re)
+ "start": "\s*[\d\.]{4,5}", ? (re)
+ "time": "\s*[\d\.]{4,5}" (re)
+ }
+ } (no-eol)
+ $ ls -a output
+ .
+ ..
+ .testtimes
+ report.json
+ test-failure.t.err
Test that failed test accepted through interactive are properly reported:
@@ -731,7 +915,7 @@
pad pad pad pad............................................................
Accept this change? [n] ..s
Skipped test-skip.t: missing feature: nail clipper
- # Ran 2 tests, 1 skipped, 0 warned, 0 failed.
+ # Ran 2 tests, 1 skipped, 0 failed.
$ cat report.json
testreport ={
@@ -774,7 +958,7 @@
$ rt test-glob-backslash.t
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
$ rm -f test-glob-backslash.t
@@ -800,7 +984,7 @@
> EOF
$ rt test-hghave.t
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
test that RUNTESTDIR refers the directory, in which `run-tests.py` now
running is placed.
@@ -816,14 +1000,14 @@
> $ test "\$TESTDIR" = "$TESTTMP"/anothertests
> #endif
> $ test "\$RUNTESTDIR" = "$TESTDIR"
- > $ head -n 3 "\$RUNTESTDIR"/../contrib/check-code.py
- > #!/usr/bin/env python
+ > $ head -n 3 "\$RUNTESTDIR"/../contrib/check-code.py | sed 's@.!.*python@#!USRBINENVPY@'
+ > #!USRBINENVPY
> #
> # check-code - a style and portability checker for Mercurial
> EOF
$ rt test-runtestdir.t
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
#if execbit
@@ -840,7 +1024,7 @@
> EOF
$ rt test-testdir-path.t
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
#endif
@@ -852,11 +1036,11 @@
> EOF
$ rt test-very-slow-test.t
s
- Skipped test-very-slow-test.t: missing feature: allow slow tests
- # Ran 0 tests, 1 skipped, 0 warned, 0 failed.
+ Skipped test-very-slow-test.t: missing feature: allow slow tests (use --allow-slow-tests)
+ # Ran 0 tests, 1 skipped, 0 failed.
$ rt $HGTEST_RUN_TESTS_PURE --allow-slow-tests test-very-slow-test.t
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
support for running a test outside the current directory
$ mkdir nonlocal
@@ -866,7 +1050,7 @@
> EOF
$ rt nonlocal/test-is-not-here.t
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
support for bisecting failed tests automatically
$ hg init bisect
@@ -897,6 +1081,130 @@
!
Failed test-bisect.t: output changed
test-bisect.t broken by 72cbf122d116 (bad)
- # Ran 1 tests, 0 skipped, 0 warned, 1 failed.
+ # Ran 1 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
+
+ $ cd ..
+
+Test a broken #if statement doesn't break run-tests threading.
+==============================================================
+ $ mkdir broken
+ $ cd broken
+ $ cat > test-broken.t <<EOF
+ > true
+ > #if notarealhghavefeature
+ > $ false
+ > #endif
+ > EOF
+ $ for f in 1 2 3 4 ; do
+ > cat > test-works-$f.t <<EOF
+ > This is test case $f
+ > $ sleep 1
+ > EOF
+ > done
+ $ rt -j 2
+ ....
+ # Ran 5 tests, 0 skipped, 0 failed.
+ skipped: unknown feature: notarealhghavefeature
+
+ $ cd ..
+ $ rm -rf broken
+
+Test cases in .t files
+======================
+ $ mkdir cases
+ $ cd cases
+ $ cat > test-cases-abc.t <<'EOF'
+ > #testcases A B C
+ > $ V=B
+ > #if A
+ > $ V=A
+ > #endif
+ > #if C
+ > $ V=C
+ > #endif
+ > $ echo $V | sed 's/A/C/'
+ > C
+ > #if C
+ > $ [ $V = C ]
+ > #endif
+ > #if A
+ > $ [ $V = C ]
+ > [1]
+ > #endif
+ > #if no-C
+ > $ [ $V = C ]
+ > [1]
+ > #endif
+ > $ [ $V = D ]
+ > [1]
+ > EOF
+ $ rt
+ .
+ --- $TESTTMP/anothertests/cases/test-cases-abc.t
+ +++ $TESTTMP/anothertests/cases/test-cases-abc.t.B.err
+ @@ -7,7 +7,7 @@
+ $ V=C
+ #endif
+ $ echo $V | sed 's/A/C/'
+ - C
+ + B
+ #if C
+ $ [ $V = C ]
+ #endif
+
+ ERROR: test-cases-abc.t (case B) output changed
+ !.
+ Failed test-cases-abc.t (case B): output changed
+ # Ran 3 tests, 0 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
+
+--restart works
+
+ $ rt --restart
+
+ --- $TESTTMP/anothertests/cases/test-cases-abc.t
+ +++ $TESTTMP/anothertests/cases/test-cases-abc.t.B.err
+ @@ -7,7 +7,7 @@
+ $ V=C
+ #endif
+ $ echo $V | sed 's/A/C/'
+ - C
+ + B
+ #if C
+ $ [ $V = C ]
+ #endif
+
+ ERROR: test-cases-abc.t (case B) output changed
+ !.
+ Failed test-cases-abc.t (case B): output changed
+ # Ran 2 tests, 0 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
+
+--restart works with outputdir
+
+ $ mkdir output
+ $ mv test-cases-abc.t.B.err output
+ $ rt --restart --outputdir output
+
+ --- $TESTTMP/anothertests/cases/test-cases-abc.t
+ +++ $TESTTMP/anothertests/cases/output/test-cases-abc.t.B.err
+ @@ -7,7 +7,7 @@
+ $ V=C
+ #endif
+ $ echo $V | sed 's/A/C/'
+ - C
+ + B
+ #if C
+ $ [ $V = C ]
+ #endif
+
+ ERROR: test-cases-abc.t (case B) output changed
+ !.
+ Failed test-cases-abc.t (case B): output changed
+ # Ran 2 tests, 0 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
--- a/tests/test-setdiscovery.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-setdiscovery.t Tue Jun 20 16:33:46 2017 -0400
@@ -83,7 +83,7 @@
taking initial sample
searching: 2 queries
query 2; still undecided: 29, sample size is: 29
- 2 total queries
+ 2 total queries in *.????s (glob)
common heads: bebd167eb94d
% -- b -> a tree
@@ -99,10 +99,9 @@
taking initial sample
searching: 2 queries
query 2; still undecided: 2, sample size is: 2
- 2 total queries
+ 2 total queries in *.????s (glob)
common heads: bebd167eb94d
-
Both sides many new with stub:
$ testdesc '-ra1 -ra2' '-rb' '
@@ -122,7 +121,7 @@
taking initial sample
searching: 2 queries
query 2; still undecided: 29, sample size is: 29
- 2 total queries
+ 2 total queries in *.????s (glob)
common heads: 2dc09a01254d
% -- b -> a tree
@@ -138,7 +137,7 @@
taking initial sample
searching: 2 queries
query 2; still undecided: 29, sample size is: 29
- 2 total queries
+ 2 total queries in *.????s (glob)
common heads: 2dc09a01254d
@@ -161,7 +160,7 @@
taking quick initial sample
searching: 2 queries
query 2; still undecided: 31, sample size is: 31
- 2 total queries
+ 2 total queries in *.????s (glob)
common heads: 66f7d451a68b
% -- b -> a tree
@@ -177,7 +176,7 @@
taking quick initial sample
searching: 2 queries
query 2; still undecided: 31, sample size is: 31
- 2 total queries
+ 2 total queries in *.????s (glob)
common heads: 66f7d451a68b
@@ -200,7 +199,7 @@
taking quick initial sample
searching: 2 queries
query 2; still undecided: 51, sample size is: 51
- 2 total queries
+ 2 total queries in *.????s (glob)
common heads: 66f7d451a68b
% -- b -> a tree
@@ -216,7 +215,7 @@
taking quick initial sample
searching: 2 queries
query 2; still undecided: 31, sample size is: 31
- 2 total queries
+ 2 total queries in *.????s (glob)
common heads: 66f7d451a68b
@@ -242,7 +241,7 @@
sampling from both directions
searching: 3 queries
query 3; still undecided: 31, sample size is: 31
- 3 total queries
+ 3 total queries in *.????s (glob)
common heads: 7ead0cba2838
% -- b -> a tree
@@ -261,7 +260,7 @@
sampling from both directions
searching: 3 queries
query 3; still undecided: 15, sample size is: 15
- 3 total queries
+ 3 total queries in *.????s (glob)
common heads: 7ead0cba2838
@@ -324,7 +323,7 @@
sampling from both directions
searching: 6 queries
query 6; still undecided: \d+, sample size is: \d+ (re)
- 6 total queries
+ 6 total queries in *.????s (glob)
common heads: 3ee37d65064a
Test actual protocol when pulling one new head in addition to common heads
@@ -364,9 +363,9 @@
#if false
generate new bundles:
$ hg init r1
- $ for i in `python $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
+ $ for i in `$PYTHON $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
$ hg clone -q r1 r2
- $ for i in `python $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
+ $ for i in `$PYTHON $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
$ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
$ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
$ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
@@ -399,8 +398,13 @@
> unrandomsample = $TESTTMP/unrandomsample.py
> EOF
- $ hg -R r1 outgoing r2 -T'{rev} '
+ $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox=
comparing with r2
searching for changes
101 102 103 104 105 106 107 108 109 110 (no-eol)
+ $ hg -R r1 --config extensions.blackbox= blackbox
+ * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> outgoing r2 *-T{rev} * (glob)
+ * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 2 roundtrips in *.????s (glob)
+ * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
+ * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> blackbox (glob)
$ cd ..
--- a/tests/test-shelve.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-shelve.t Tue Jun 20 16:33:46 2017 -0400
@@ -1591,9 +1591,8 @@
Removing restore branch information from shelvedstate file(making it looks like
in previous versions) and running unshelve --continue
- $ head -n 6 < .hg/shelvedstate > .hg/shelvedstate_oldformat
- $ rm .hg/shelvedstate
- $ mv .hg/shelvedstate_oldformat .hg/shelvedstate
+ $ cp .hg/shelvedstate .hg/shelvedstate_old
+ $ cat .hg/shelvedstate_old | grep -v 'branchtorestore' > .hg/shelvedstate
$ echo "aaabbbccc" > a
$ rm a.orig
@@ -1737,3 +1736,48 @@
[255]
$ hg st
! a
+ $ cd ..
+
+New versions of Mercurial know how to read old shelvedstate files
+ $ hg init oldshelvedstate
+ $ cd oldshelvedstate
+ $ echo root > root && hg ci -Am root
+ adding root
+ $ echo 1 > a
+ $ hg add a
+ $ hg shelve --name ashelve
+ shelved as ashelve
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ echo 2 > a
+ $ hg ci -Am a
+ adding a
+ $ hg unshelve
+ unshelving change 'ashelve'
+ rebasing shelved changes
+ rebasing 2:003d2d94241c "changes to: root" (tip)
+ merging a
+ warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+ unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
+ [1]
+putting v1 shelvedstate file in place of a created v2
+ $ cat << EOF > .hg/shelvedstate
+ > 1
+ > ashelve
+ > 8b058dae057a5a78f393f4535d9e363dd5efac9d
+ > 8b058dae057a5a78f393f4535d9e363dd5efac9d
+ > 8b058dae057a5a78f393f4535d9e363dd5efac9d 003d2d94241cc7aff0c3a148e966d6a4a377f3a7
+ > 003d2d94241cc7aff0c3a148e966d6a4a377f3a7
+ >
+ > nokeep
+ > :no-active-bookmark
+ > EOF
+ $ echo 1 > a
+ $ hg resolve --mark a
+ (no more unresolved files)
+ continue: hg unshelve --continue
+mercurial does not crash
+ $ hg unshelve --continue
+ rebasing 2:003d2d94241c "changes to: root" (tip)
+ unshelve of 'ashelve' complete
+ $ cd ..
+
--- a/tests/test-simple-update.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-simple-update.t Tue Jun 20 16:33:46 2017 -0400
@@ -77,7 +77,7 @@
> [worker]
> numcpus = 4
> EOF
- $ for i in `python $TESTDIR/seq.py 1 100`; do
+ $ for i in `$PYTHON $TESTDIR/seq.py 1 100`; do
> echo $i > $i
> done
$ hg ci -qAm 'add 100 files'
--- a/tests/test-simplekeyvaluefile.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-simplekeyvaluefile.py Tue Jun 20 16:33:46 2017 -0400
@@ -33,7 +33,8 @@
return mockfile(path, self).read()
def readlines(self, path):
- return mockfile(path, self).read().split('\n')
+ # lines need to contain the trailing '\n' to mock the real readlines
+ return [l for l in mockfile(path, self).read().splitlines(True)]
def __call__(self, path, mode, atomictemp):
return mockfile(path, self)
@@ -42,32 +43,42 @@
def setUp(self):
self.vfs = mockvfs()
- def testbasicwriting(self):
- d = {'key1': 'value1', 'Key2': 'value2'}
- scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(d)
+ def testbasicwritingiandreading(self):
+ dw = {'key1': 'value1', 'Key2': 'value2'}
+ scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(dw)
self.assertEqual(sorted(self.vfs.read('kvfile').split('\n')),
['', 'Key2=value2', 'key1=value1'])
+ dr = scmutil.simplekeyvaluefile(self.vfs, 'kvfile').read()
+ self.assertEqual(dr, dw)
def testinvalidkeys(self):
d = {'0key1': 'value1', 'Key2': 'value2'}
- self.assertRaises(error.ProgrammingError,
- scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write,
- d)
+ with self.assertRaisesRegexp(error.ProgrammingError,
+ 'keys must start with a letter.*'):
+ scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(d)
+
d = {'key1@': 'value1', 'Key2': 'value2'}
- self.assertRaises(error.ProgrammingError,
- scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write,
- d)
+ with self.assertRaisesRegexp(error.ProgrammingError, 'invalid key.*'):
+ scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(d)
def testinvalidvalues(self):
d = {'key1': 'value1', 'Key2': 'value2\n'}
- self.assertRaises(error.ProgrammingError,
- scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write,
- d)
+ with self.assertRaisesRegexp(error.ProgrammingError, 'invalid val.*'):
+ scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(d)
def testcorruptedfile(self):
self.vfs.contents['badfile'] = 'ababagalamaga\n'
- self.assertRaises(error.CorruptedState,
- scmutil.simplekeyvaluefile(self.vfs, 'badfile').read)
+ with self.assertRaisesRegexp(error.CorruptedState,
+ 'dictionary.*element.*'):
+ scmutil.simplekeyvaluefile(self.vfs, 'badfile').read()
+
+ def testfirstline(self):
+ dw = {'key1': 'value1'}
+ scmutil.simplekeyvaluefile(self.vfs, 'fl').write(dw, firstline='1.0')
+ self.assertEqual(self.vfs.read('fl'), '1.0\nkey1=value1\n')
+ dr = scmutil.simplekeyvaluefile(self.vfs, 'fl')\
+ .read(firstlinenonkeyval=True)
+ self.assertEqual(dr, {'__firstline': '1.0', 'key1': 'value1'})
if __name__ == "__main__":
silenttestrunner.main(__name__)
--- a/tests/test-simplemerge.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-simplemerge.py Tue Jun 20 16:33:46 2017 -0400
@@ -326,7 +326,8 @@
self.assertEquals(ml, MERGED_RESULT)
def test_binary(self):
- self.assertRaises(error.Abort, Merge3, ['\x00'], ['a'], ['b'])
+ with self.assertRaises(error.Abort):
+ Merge3(['\x00'], ['a'], ['b'])
def test_dos_text(self):
base_text = 'a\r\n'
--- a/tests/test-static-http.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-static-http.t Tue Jun 20 16:33:46 2017 -0400
@@ -9,7 +9,7 @@
This server doesn't do range requests so it's basically only good for
one pull
- $ python "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid
+ $ $PYTHON "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid
$ cat dumb.pid >> $DAEMON_PIDS
$ hg init remote
$ cd remote
--- a/tests/test-status-rev.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-status-rev.t Tue Jun 20 16:33:46 2017 -0400
@@ -5,7 +5,7 @@
First commit
- $ python $TESTDIR/generate-working-copy-states.py state 2 1
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 2 1
$ hg addremove --similarity 0
adding content1_content1_content1-tracked
adding content1_content1_content1-untracked
@@ -31,7 +31,7 @@
Second commit
- $ python $TESTDIR/generate-working-copy-states.py state 2 2
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 2 2
$ hg addremove --similarity 0
removing content1_missing_content1-tracked
removing content1_missing_content1-untracked
@@ -49,7 +49,7 @@
Working copy
- $ python $TESTDIR/generate-working-copy-states.py state 2 wc
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 2 wc
$ hg addremove --similarity 0
adding content1_missing_content1-tracked
adding content1_missing_content1-untracked
--- a/tests/test-status.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-status.t Tue Jun 20 16:33:46 2017 -0400
@@ -107,6 +107,29 @@
? a/in_a
? b/in_b
+tweaking defaults works
+ $ hg status --cwd a --config ui.tweakdefaults=yes
+ ? 1/in_a_1
+ ? in_a
+ ? ../b/1/in_b_1
+ ? ../b/2/in_b_2
+ ? ../b/in_b
+ ? ../in_root
+ $ HGPLAIN=1 hg status --cwd a --config ui.tweakdefaults=yes
+ ? a/1/in_a_1 (glob)
+ ? a/in_a (glob)
+ ? b/1/in_b_1 (glob)
+ ? b/2/in_b_2 (glob)
+ ? b/in_b (glob)
+ ? in_root
+ $ HGPLAINEXCEPT=tweakdefaults hg status --cwd a --config ui.tweakdefaults=yes
+ ? 1/in_a_1 (glob)
+ ? in_a
+ ? ../b/1/in_b_1 (glob)
+ ? ../b/2/in_b_2 (glob)
+ ? ../b/in_b (glob)
+ ? ../in_root (glob)
+
relative paths can be requested
$ cat >> $HGRCPATH <<EOF
@@ -128,6 +151,19 @@
? b/in_b (glob)
? in_root
+if relative paths are explicitly off, tweakdefaults doesn't change it
+ $ cat >> $HGRCPATH <<EOF
+ > [commands]
+ > status.relative = False
+ > EOF
+ $ hg status --cwd a --config ui.tweakdefaults=yes
+ ? a/1/in_a_1
+ ? a/in_a
+ ? b/1/in_b_1
+ ? b/2/in_b_2
+ ? b/in_b
+ ? in_root
+
$ cd ..
$ hg init repo2
--- a/tests/test-strip.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-strip.t Tue Jun 20 16:33:46 2017 -0400
@@ -287,6 +287,7 @@
$ hg up
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "264128213d29: c"
1 other heads for branch "default"
$ hg log -G
@ changeset: 4:264128213d29
@@ -841,6 +842,7 @@
bundle2-output-bundle: "HG20", (1 params) 1 parts total
bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/6625a5168474-345bb43d-backup.hg (glob)
+ updating the branch cache
invalid branchheads cache (served): tip differs
truncating cache/rbc-revs-v1 to 24
$ hg log -G
--- a/tests/test-subrepo-git.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-subrepo-git.t Tue Jun 20 16:33:46 2017 -0400
@@ -2,6 +2,11 @@
make git commits repeatable
+ $ cat >> $HGRCPATH <<EOF
+ > [defaults]
+ > commit = -d "0 0"
+ > EOF
+
$ echo "[core]" >> $HOME/.gitconfig
$ echo "autocrlf = false" >> $HOME/.gitconfig
$ GIT_AUTHOR_NAME='test'; export GIT_AUTHOR_NAME
@@ -1132,6 +1137,8 @@
? s/foobar.orig
? s/snake.python.orig
+#if git19
+
test for Git CVE-2016-3068
$ hg init malicious-subrepository
$ cd malicious-subrepository
@@ -1173,3 +1180,5 @@
[255]
$ f -Dq pwned.txt
pwned: you asked for it
+
+#endif
--- a/tests/test-subrepo-missing.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-subrepo-missing.t Tue Jun 20 16:33:46 2017 -0400
@@ -34,7 +34,6 @@
$ hg revert .hgsub
warning: subrepo spec file '.hgsub' not found
warning: subrepo spec file '.hgsub' not found
- warning: subrepo spec file '.hgsub' not found
delete .hgsubstate and revert it
@@ -109,7 +108,7 @@
verify will warn if locked-in subrepo revisions are hidden or missing
$ hg ci -m "amended subrepo (again)"
- $ hg --config extensions.strip= --hidden strip -R subrepo -qr 'tip'
+ $ hg --config extensions.strip= --hidden strip -R subrepo -qr 'tip' --config devel.strip-obsmarkers=no
$ hg verify
checking changesets
checking manifests
--- a/tests/test-subrepo.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-subrepo.t Tue Jun 20 16:33:46 2017 -0400
@@ -680,6 +680,7 @@
$ cd ../t
$ hg up -C # discard our earlier merge
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "c373c8102e68: 12"
2 other heads for branch "default"
$ echo blah > t/t
$ hg ci -m13
@@ -694,6 +695,7 @@
$ hg up -C # discard changes
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "925c17564ef8: 13"
2 other heads for branch "default"
pull
@@ -736,6 +738,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "925c17564ef8: 13"
2 other heads for branch "default"
$ cat t/t
blah
@@ -1020,6 +1023,14 @@
$ hg cat sub/repo/foo
test
test
+ $ hg cat sub/repo/foo -Tjson | sed 's|\\\\|/|g'
+ [
+ {
+ "abspath": "foo",
+ "data": "test\ntest\n",
+ "path": "sub/repo/foo"
+ }
+ ]
$ mkdir -p tmp/sub/repo
$ hg cat -r 0 --output tmp/%p_p sub/repo/foo
$ cat tmp/sub/repo/foo_p
@@ -1204,6 +1215,7 @@
? s/c
$ hg update -C
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "925c17564ef8: 13"
2 other heads for branch "default"
$ hg status -S
? s/b
--- a/tests/test-tags.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-tags.t Tue Jun 20 16:33:46 2017 -0400
@@ -671,13 +671,11 @@
Missing tags2* files means the cache wasn't written through the normal mechanism.
$ ls tagsclient/.hg/cache
- branch2-served
+ branch2-base
checkisexec (execbit !)
checklink (symlink !)
checklink-target (symlink !)
hgtagsfnodes1
- rbc-names-v1
- rbc-revs-v1
Cache should contain the head only, even though other nodes have tags data
@@ -698,13 +696,11 @@
0.1 0:96ee1d7354c4
$ ls tagsclient/.hg/cache
- branch2-served
+ branch2-base
checkisexec (execbit !)
checklink (symlink !)
checklink-target (symlink !)
hgtagsfnodes1
- rbc-names-v1
- rbc-revs-v1
tags2-visible
$ f --size --hexdump tagsclient/.hg/cache/hgtagsfnodes1
@@ -716,3 +712,23 @@
0040: ff ff ff ff ff ff ff ff 40 f0 35 8c 19 e0 a7 d3 |........@.5.....|
0050: 8a 5c 6a 82 4d cf fb a5 87 d0 2f a3 1e 4f 2f 8a |.\j.M...../..O/.|
+Check that the bundle includes cache data
+
+ $ hg -R tagsclient bundle --all ./test-cache-in-bundle-all-rev.hg
+ 4 changesets found
+ $ hg debugbundle ./test-cache-in-bundle-all-rev.hg
+ Stream params: sortdict([('Compression', 'BZ')])
+ changegroup -- "sortdict([('version', '02'), ('nbchanges', '4')])"
+ 96ee1d7354c4ad7372047672c36a1f561e3a6a4c
+ c4dab0c2fd337eb9191f80c3024830a4889a8f34
+ f63cc8fe54e4d326f8d692805d70e092f851ddb1
+ 40f0358cb314c824a5929ee527308d90e023bc10
+ hgtagsfnodes -- 'sortdict()'
+
+Check that local clone includes cache data
+
+ $ hg clone tagsclient tags-local-clone
+ updating to branch default
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ (cd tags-local-clone/.hg/cache/; ls -1 tag*)
+ tags2-visible
--- a/tests/test-tools.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-tools.t Tue Jun 20 16:33:46 2017 -0400
@@ -44,10 +44,10 @@
#endif
#if no-windows
- $ python $TESTDIR/seq.py 10 > bar
+ $ $PYTHON $TESTDIR/seq.py 10 > bar
#else
Convert CRLF -> LF for consistency
- $ python $TESTDIR/seq.py 10 | sed "s/$//" > bar
+ $ $PYTHON $TESTDIR/seq.py 10 | sed "s/$//" > bar
#endif
#if unix-permissions symlink
--- a/tests/test-transplant.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-transplant.t Tue Jun 20 16:33:46 2017 -0400
@@ -419,6 +419,7 @@
$ hg up -C
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "e8643552fde5: foobar"
1 other heads for branch "default"
$ rm added
$ hg transplant --continue
--- a/tests/test-ui-config.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-ui-config.py Tue Jun 20 16:33:46 2017 -0400
@@ -32,6 +32,9 @@
'lists.list16="longer quotation" with "no ending quotation',
'lists.list17=this is \\" "not a quotation mark"',
'lists.list18=\n \n\nding\ndong',
+ 'date.epoch=0 0',
+ 'date.birth=2005-04-19T00:00:00',
+ 'date.invalid=0'
])
print(repr(testui.configitems('values')))
@@ -82,6 +85,9 @@
print(repr(testui.configlist('lists', 'unknown', 'foo, bar')))
print(repr(testui.configlist('lists', 'unknown', ['foo bar'])))
print(repr(testui.configlist('lists', 'unknown', ['foo', 'bar'])))
+print("---")
+print(repr(testui.configdate('date', 'epoch')))
+print(repr(testui.configdate('date', 'birth')))
print(repr(testui.config('values', 'String')))
@@ -101,3 +107,7 @@
testui.configint('values', 'intinvalid')
except error.ConfigError:
print('intinvalid')
+try:
+ testui.configdate('date', 'invalid')
+except error.ConfigError:
+ print('dateinvalid')
--- a/tests/test-ui-config.py.out Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-ui-config.py.out Tue Jun 20 16:33:46 2017 -0400
@@ -43,7 +43,11 @@
['foo', 'bar']
['foo bar']
['foo', 'bar']
+---
+(0, 0)
+(1113868800, 0)
None
True
boolinvalid
intinvalid
+dateinvalid
--- a/tests/test-up-local-change.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-up-local-change.t Tue Jun 20 16:33:46 2017 -0400
@@ -172,6 +172,7 @@
$ hg --debug up
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "1e71731e6fbb: 2"
1 other heads for branch "default"
test conflicting untracked files
--- a/tests/test-update-branches.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-update-branches.t Tue Jun 20 16:33:46 2017 -0400
@@ -94,6 +94,7 @@
$ norevtest 'none clean same' clean 2
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "bd10386d478c: 2"
1 other heads for branch "default"
parent=2
@@ -141,6 +142,7 @@
$ norevtest 'none dirty cross' dirty 2
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "bd10386d478c: 2"
1 other heads for branch "default"
parent=2
M foo
@@ -177,6 +179,7 @@
$ norevtest '-c clean same' clean 2 -c
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "bd10386d478c: 2"
1 other heads for branch "default"
parent=2
--- a/tests/test-verify.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-verify.t Tue Jun 20 16:33:46 2017 -0400
@@ -317,3 +317,47 @@
checking files
1 files, 1 changesets, 1 total revisions
$ cd ..
+
+test flag processor and skipflags
+
+ $ hg init skipflags
+ $ cd skipflags
+ $ cat >> .hg/hgrc <<EOF
+ > [extensions]
+ > flagprocessor=$RUNTESTDIR/flagprocessorext.py
+ > EOF
+ $ echo '[BASE64]content' > base64
+ $ hg commit -Aqm 'flag processor content' base64
+ $ hg verify
+ checking changesets
+ checking manifests
+ crosschecking files in changesets and manifests
+ checking files
+ 1 files, 1 changesets, 1 total revisions
+
+ $ cat >> $TESTTMP/break-base64.py <<EOF
+ > from __future__ import absolute_import
+ > import base64
+ > base64.b64decode=lambda x: x
+ > EOF
+ $ cat >> .hg/hgrc <<EOF
+ > breakbase64=$TESTTMP/break-base64.py
+ > EOF
+
+ $ hg verify
+ checking changesets
+ checking manifests
+ crosschecking files in changesets and manifests
+ checking files
+ base64@0: unpacking 794cee7777cb: integrity check failed on data/base64.i:0
+ 1 files, 1 changesets, 1 total revisions
+ 1 integrity errors encountered!
+ (first damaged changeset appears to be 0)
+ [1]
+ $ hg verify --config verify.skipflags=2147483647
+ checking changesets
+ checking manifests
+ crosschecking files in changesets and manifests
+ checking files
+ 1 files, 1 changesets, 1 total revisions
+
--- a/tests/test-walk.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-walk.t Tue Jun 20 16:33:46 2017 -0400
@@ -29,6 +29,7 @@
$ hg commit -m "commit #0"
$ hg debugwalk
+ matcher: <alwaysmatcher>
f beans/black beans/black
f beans/borlotti beans/borlotti
f beans/kidney beans/kidney
@@ -43,6 +44,7 @@
f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
f mammals/skunk mammals/skunk
$ hg debugwalk -I.
+ matcher: <includematcher includes='(?:)'>
f beans/black beans/black
f beans/borlotti beans/borlotti
f beans/kidney beans/kidney
@@ -59,6 +61,7 @@
$ cd mammals
$ hg debugwalk
+ matcher: <alwaysmatcher>
f beans/black ../beans/black
f beans/borlotti ../beans/borlotti
f beans/kidney ../beans/kidney
@@ -73,6 +76,7 @@
f mammals/Procyonidae/raccoon Procyonidae/raccoon
f mammals/skunk skunk
$ hg debugwalk -X ../beans
+ matcher: <differencematcher m1=<alwaysmatcher>, m2=<includematcher includes='(?:beans(?:/|$))'>>
f fennel ../fennel
f fenugreek ../fenugreek
f fiddlehead ../fiddlehead
@@ -81,24 +85,31 @@
f mammals/Procyonidae/raccoon Procyonidae/raccoon
f mammals/skunk skunk
$ hg debugwalk -I '*k'
+ matcher: <includematcher includes='(?:mammals\\/[^/]*k(?:/|$))'>
f mammals/skunk skunk
$ hg debugwalk -I 'glob:*k'
+ matcher: <includematcher includes='(?:mammals\\/[^/]*k(?:/|$))'>
f mammals/skunk skunk
$ hg debugwalk -I 'relglob:*k'
+ matcher: <includematcher includes='(?:(?:|.*/)[^/]*k(?:/|$))'>
f beans/black ../beans/black
f fenugreek ../fenugreek
f mammals/skunk skunk
$ hg debugwalk -I 'relglob:*k' .
+ matcher: <intersectionmatcher m1=<patternmatcher patterns='(?:mammals(?:/|$))'>, m2=<includematcher includes='(?:(?:|.*/)[^/]*k(?:/|$))'>>
f mammals/skunk skunk
$ hg debugwalk -I 're:.*k$'
+ matcher: <includematcher includes='(?:.*k$)'>
f beans/black ../beans/black
f fenugreek ../fenugreek
f mammals/skunk skunk
$ hg debugwalk -I 'relre:.*k$'
+ matcher: <includematcher includes='(?:.*.*k$)'>
f beans/black ../beans/black
f fenugreek ../fenugreek
f mammals/skunk skunk
$ hg debugwalk -I 'path:beans'
+ matcher: <includematcher includes='(?:^beans(?:/|$))'>
f beans/black ../beans/black
f beans/borlotti ../beans/borlotti
f beans/kidney ../beans/kidney
@@ -106,6 +117,7 @@
f beans/pinto ../beans/pinto
f beans/turtle ../beans/turtle
$ hg debugwalk -I 'relpath:detour/../../beans'
+ matcher: <includematcher includes='(?:beans(?:/|$))'>
f beans/black ../beans/black
f beans/borlotti ../beans/borlotti
f beans/kidney ../beans/kidney
@@ -114,22 +126,27 @@
f beans/turtle ../beans/turtle
$ hg debugwalk 'rootfilesin:'
+ matcher: <patternmatcher patterns='(?:^[^/]+$)'>
f fennel ../fennel
f fenugreek ../fenugreek
f fiddlehead ../fiddlehead
$ hg debugwalk -I 'rootfilesin:'
+ matcher: <includematcher includes='(?:^[^/]+$)'>
f fennel ../fennel
f fenugreek ../fenugreek
f fiddlehead ../fiddlehead
$ hg debugwalk 'rootfilesin:.'
+ matcher: <patternmatcher patterns='(?:^[^/]+$)'>
f fennel ../fennel
f fenugreek ../fenugreek
f fiddlehead ../fiddlehead
$ hg debugwalk -I 'rootfilesin:.'
+ matcher: <includematcher includes='(?:^[^/]+$)'>
f fennel ../fennel
f fenugreek ../fenugreek
f fiddlehead ../fiddlehead
$ hg debugwalk -X 'rootfilesin:'
+ matcher: <differencematcher m1=<alwaysmatcher>, m2=<includematcher includes='(?:^[^/]+$)'>>
f beans/black ../beans/black
f beans/borlotti ../beans/borlotti
f beans/kidney ../beans/kidney
@@ -141,10 +158,15 @@
f mammals/Procyonidae/raccoon Procyonidae/raccoon
f mammals/skunk skunk
$ hg debugwalk 'rootfilesin:fennel'
+ matcher: <patternmatcher patterns='(?:^fennel/[^/]+$)'>
$ hg debugwalk -I 'rootfilesin:fennel'
+ matcher: <includematcher includes='(?:^fennel/[^/]+$)'>
$ hg debugwalk 'rootfilesin:skunk'
+ matcher: <patternmatcher patterns='(?:^skunk/[^/]+$)'>
$ hg debugwalk -I 'rootfilesin:skunk'
+ matcher: <includematcher includes='(?:^skunk/[^/]+$)'>
$ hg debugwalk 'rootfilesin:beans'
+ matcher: <patternmatcher patterns='(?:^beans/[^/]+$)'>
f beans/black ../beans/black
f beans/borlotti ../beans/borlotti
f beans/kidney ../beans/kidney
@@ -152,6 +174,7 @@
f beans/pinto ../beans/pinto
f beans/turtle ../beans/turtle
$ hg debugwalk -I 'rootfilesin:beans'
+ matcher: <includematcher includes='(?:^beans/[^/]+$)'>
f beans/black ../beans/black
f beans/borlotti ../beans/borlotti
f beans/kidney ../beans/kidney
@@ -159,14 +182,19 @@
f beans/pinto ../beans/pinto
f beans/turtle ../beans/turtle
$ hg debugwalk 'rootfilesin:mammals'
+ matcher: <patternmatcher patterns='(?:^mammals/[^/]+$)'>
f mammals/skunk skunk
$ hg debugwalk -I 'rootfilesin:mammals'
+ matcher: <includematcher includes='(?:^mammals/[^/]+$)'>
f mammals/skunk skunk
$ hg debugwalk 'rootfilesin:mammals/'
+ matcher: <patternmatcher patterns='(?:^mammals/[^/]+$)'>
f mammals/skunk skunk
$ hg debugwalk -I 'rootfilesin:mammals/'
+ matcher: <includematcher includes='(?:^mammals/[^/]+$)'>
f mammals/skunk skunk
$ hg debugwalk -X 'rootfilesin:mammals'
+ matcher: <differencematcher m1=<alwaysmatcher>, m2=<includematcher includes='(?:^mammals/[^/]+$)'>>
f beans/black ../beans/black
f beans/borlotti ../beans/borlotti
f beans/kidney ../beans/kidney
@@ -181,26 +209,31 @@
f mammals/Procyonidae/raccoon Procyonidae/raccoon
$ hg debugwalk .
+ matcher: <patternmatcher patterns='(?:mammals(?:/|$))'>
f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
f mammals/Procyonidae/raccoon Procyonidae/raccoon
f mammals/skunk skunk
$ hg debugwalk -I.
+ matcher: <includematcher includes='(?:mammals(?:/|$))'>
f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
f mammals/Procyonidae/raccoon Procyonidae/raccoon
f mammals/skunk skunk
$ hg debugwalk Procyonidae
+ matcher: <patternmatcher patterns='(?:mammals\\/Procyonidae(?:/|$))'>
f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
f mammals/Procyonidae/raccoon Procyonidae/raccoon
$ cd Procyonidae
$ hg debugwalk .
+ matcher: <patternmatcher patterns='(?:mammals\\/Procyonidae(?:/|$))'>
f mammals/Procyonidae/cacomistle cacomistle
f mammals/Procyonidae/coatimundi coatimundi
f mammals/Procyonidae/raccoon raccoon
$ hg debugwalk ..
+ matcher: <patternmatcher patterns='(?:mammals(?:/|$))'>
f mammals/Procyonidae/cacomistle cacomistle
f mammals/Procyonidae/coatimundi coatimundi
f mammals/Procyonidae/raccoon raccoon
@@ -208,6 +241,7 @@
$ cd ..
$ hg debugwalk ../beans
+ matcher: <patternmatcher patterns='(?:beans(?:/|$))'>
f beans/black ../beans/black
f beans/borlotti ../beans/borlotti
f beans/kidney ../beans/kidney
@@ -215,6 +249,7 @@
f beans/pinto ../beans/pinto
f beans/turtle ../beans/turtle
$ hg debugwalk .
+ matcher: <patternmatcher patterns='(?:mammals(?:/|$))'>
f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
f mammals/Procyonidae/raccoon Procyonidae/raccoon
@@ -228,6 +263,7 @@
$ cd ..
$ hg debugwalk -Ibeans
+ matcher: <includematcher includes='(?:beans(?:/|$))'>
f beans/black beans/black
f beans/borlotti beans/borlotti
f beans/kidney beans/kidney
@@ -235,41 +271,56 @@
f beans/pinto beans/pinto
f beans/turtle beans/turtle
$ hg debugwalk -I '{*,{b,m}*/*}k'
+ matcher: <includematcher includes='(?:(?:[^/]*|(?:b|m)[^/]*\\/[^/]*)k(?:/|$))'>
f beans/black beans/black
f fenugreek fenugreek
f mammals/skunk mammals/skunk
$ hg debugwalk -Ibeans mammals
+ matcher: <intersectionmatcher m1=<patternmatcher patterns='(?:mammals(?:/|$))'>, m2=<includematcher includes='(?:beans(?:/|$))'>>
$ hg debugwalk -Inon-existent
+ matcher: <includematcher includes='(?:non\\-existent(?:/|$))'>
$ hg debugwalk -Inon-existent -Ibeans/black
+ matcher: <includematcher includes='(?:non\\-existent(?:/|$)|beans\\/black(?:/|$))'>
f beans/black beans/black
$ hg debugwalk -Ibeans beans/black
+ matcher: <intersectionmatcher m1=<patternmatcher patterns='(?:beans\\/black(?:/|$))'>, m2=<includematcher includes='(?:beans(?:/|$))'>>
f beans/black beans/black exact
$ hg debugwalk -Ibeans/black beans
+ matcher: <intersectionmatcher m1=<patternmatcher patterns='(?:beans(?:/|$))'>, m2=<includematcher includes='(?:beans\\/black(?:/|$))'>>
f beans/black beans/black
$ hg debugwalk -Xbeans/black beans
+ matcher: <differencematcher m1=<patternmatcher patterns='(?:beans(?:/|$))'>, m2=<includematcher includes='(?:beans\\/black(?:/|$))'>>
f beans/borlotti beans/borlotti
f beans/kidney beans/kidney
f beans/navy beans/navy
f beans/pinto beans/pinto
f beans/turtle beans/turtle
$ hg debugwalk -Xbeans/black -Ibeans
+ matcher: <differencematcher m1=<includematcher includes='(?:beans(?:/|$))'>, m2=<includematcher includes='(?:beans\\/black(?:/|$))'>>
f beans/borlotti beans/borlotti
f beans/kidney beans/kidney
f beans/navy beans/navy
f beans/pinto beans/pinto
f beans/turtle beans/turtle
$ hg debugwalk -Xbeans/black beans/black
+ matcher: <differencematcher m1=<patternmatcher patterns='(?:beans\\/black(?:/|$))'>, m2=<includematcher includes='(?:beans\\/black(?:/|$))'>>
f beans/black beans/black exact
$ hg debugwalk -Xbeans/black -Ibeans/black
+ matcher: <differencematcher m1=<includematcher includes='(?:beans\\/black(?:/|$))'>, m2=<includematcher includes='(?:beans\\/black(?:/|$))'>>
$ hg debugwalk -Xbeans beans/black
+ matcher: <differencematcher m1=<patternmatcher patterns='(?:beans\\/black(?:/|$))'>, m2=<includematcher includes='(?:beans(?:/|$))'>>
f beans/black beans/black exact
$ hg debugwalk -Xbeans -Ibeans/black
+ matcher: <differencematcher m1=<includematcher includes='(?:beans\\/black(?:/|$))'>, m2=<includematcher includes='(?:beans(?:/|$))'>>
$ hg debugwalk 'glob:mammals/../beans/b*'
+ matcher: <patternmatcher patterns='(?:beans\\/b[^/]*$)'>
f beans/black beans/black
f beans/borlotti beans/borlotti
$ hg debugwalk '-X*/Procyonidae' mammals
+ matcher: <differencematcher m1=<patternmatcher patterns='(?:mammals(?:/|$))'>, m2=<includematcher includes='(?:[^/]*\\/Procyonidae(?:/|$))'>>
f mammals/skunk mammals/skunk
$ hg debugwalk path:mammals
+ matcher: <patternmatcher patterns='(?:^mammals(?:/|$))'>
f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
@@ -296,6 +347,7 @@
Test absolute paths:
$ hg debugwalk `pwd`/beans
+ matcher: <patternmatcher patterns='(?:beans(?:/|$))'>
f beans/black beans/black
f beans/borlotti beans/borlotti
f beans/kidney beans/kidney
@@ -309,6 +361,7 @@
Test patterns:
$ hg debugwalk glob:\*
+ matcher: <patternmatcher patterns='(?:[^/]*$)'>
f fennel fennel
f fenugreek fenugreek
f fiddlehead fiddlehead
@@ -318,15 +371,19 @@
adding glob:glob
warning: filename contains ':', which is reserved on Windows: 'glob:glob'
$ hg debugwalk glob:\*
+ matcher: <patternmatcher patterns='(?:[^/]*$)'>
f fennel fennel
f fenugreek fenugreek
f fiddlehead fiddlehead
f glob:glob glob:glob
$ hg debugwalk glob:glob
+ matcher: <patternmatcher patterns='(?:glob$)'>
glob: No such file or directory
$ hg debugwalk glob:glob:glob
+ matcher: <patternmatcher patterns='(?:glob\\:glob$)'>
f glob:glob glob:glob exact
$ hg debugwalk path:glob:glob
+ matcher: <patternmatcher patterns='(?:^glob\\:glob(?:/|$))'>
f glob:glob glob:glob exact
$ rm glob:glob
$ hg addremove
@@ -334,30 +391,38 @@
#endif
$ hg debugwalk 'glob:**e'
+ matcher: <patternmatcher patterns='(?:.*e$)'>
f beans/turtle beans/turtle
f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
$ hg debugwalk 're:.*[kb]$'
+ matcher: <patternmatcher patterns='(?:.*[kb]$)'>
f beans/black beans/black
f fenugreek fenugreek
f mammals/skunk mammals/skunk
$ hg debugwalk path:beans/black
+ matcher: <patternmatcher patterns='(?:^beans\\/black(?:/|$))'>
f beans/black beans/black exact
$ hg debugwalk path:beans//black
+ matcher: <patternmatcher patterns='(?:^beans\\/black(?:/|$))'>
f beans/black beans/black exact
$ hg debugwalk relglob:Procyonidae
+ matcher: <patternmatcher patterns='(?:(?:|.*/)Procyonidae$)'>
$ hg debugwalk 'relglob:Procyonidae/**'
+ matcher: <patternmatcher patterns='(?:(?:|.*/)Procyonidae\\/.*$)'>
f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
$ hg debugwalk 'relglob:Procyonidae/**' fennel
+ matcher: <patternmatcher patterns='(?:(?:|.*/)Procyonidae\\/.*$|fennel(?:/|$))'>
f fennel fennel exact
f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
$ hg debugwalk beans 'glob:beans/*'
+ matcher: <patternmatcher patterns='(?:beans(?:/|$)|beans\\/[^/]*$)'>
f beans/black beans/black
f beans/borlotti beans/borlotti
f beans/kidney beans/kidney
@@ -365,63 +430,78 @@
f beans/pinto beans/pinto
f beans/turtle beans/turtle
$ hg debugwalk 'glob:mamm**'
+ matcher: <patternmatcher patterns='(?:mamm.*$)'>
f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
f mammals/skunk mammals/skunk
$ hg debugwalk 'glob:mamm**' fennel
+ matcher: <patternmatcher patterns='(?:mamm.*$|fennel(?:/|$))'>
f fennel fennel exact
f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
f mammals/skunk mammals/skunk
$ hg debugwalk 'glob:j*'
+ matcher: <patternmatcher patterns='(?:j[^/]*$)'>
$ hg debugwalk NOEXIST
+ matcher: <patternmatcher patterns='(?:NOEXIST(?:/|$))'>
NOEXIST: * (glob)
#if fifo
$ mkfifo fifo
$ hg debugwalk fifo
+ matcher: <patternmatcher patterns='(?:fifo(?:/|$))'>
fifo: unsupported file type (type is fifo)
#endif
$ rm fenugreek
$ hg debugwalk fenugreek
+ matcher: <patternmatcher patterns='(?:fenugreek(?:/|$))'>
f fenugreek fenugreek exact
$ hg rm fenugreek
$ hg debugwalk fenugreek
+ matcher: <patternmatcher patterns='(?:fenugreek(?:/|$))'>
f fenugreek fenugreek exact
$ touch new
$ hg debugwalk new
+ matcher: <patternmatcher patterns='(?:new(?:/|$))'>
f new new exact
$ mkdir ignored
$ touch ignored/file
$ echo '^ignored$' > .hgignore
$ hg debugwalk ignored
+ matcher: <patternmatcher patterns='(?:ignored(?:/|$))'>
$ hg debugwalk ignored/file
+ matcher: <patternmatcher patterns='(?:ignored\\/file(?:/|$))'>
f ignored/file ignored/file exact
Test listfile and listfile0
$ $PYTHON -c "file('listfile0', 'wb').write('fenugreek\0new\0')"
$ hg debugwalk -I 'listfile0:listfile0'
+ matcher: <includematcher includes='(?:fenugreek(?:/|$)|new(?:/|$))'>
f fenugreek fenugreek
f new new
$ $PYTHON -c "file('listfile', 'wb').write('fenugreek\nnew\r\nmammals/skunk\n')"
$ hg debugwalk -I 'listfile:listfile'
+ matcher: <includematcher includes='(?:fenugreek(?:/|$)|new(?:/|$)|mammals\\/skunk(?:/|$))'>
f fenugreek fenugreek
f mammals/skunk mammals/skunk
f new new
$ cd ..
$ hg debugwalk -R t t/mammals/skunk
+ matcher: <patternmatcher patterns='(?:mammals\\/skunk(?:/|$))'>
f mammals/skunk t/mammals/skunk exact
$ mkdir t2
$ cd t2
$ hg debugwalk -R ../t ../t/mammals/skunk
+ matcher: <patternmatcher patterns='(?:mammals\\/skunk(?:/|$))'>
f mammals/skunk ../t/mammals/skunk exact
$ hg debugwalk --cwd ../t mammals/skunk
+ matcher: <patternmatcher patterns='(?:mammals\\/skunk(?:/|$))'>
f mammals/skunk mammals/skunk exact
$ cd ..
@@ -432,7 +512,7 @@
$ echo fennel > overflow.list
$ $PYTHON -c "for i in xrange(20000 / 100): print 'x' * 100" >> overflow.list
$ echo fenugreek >> overflow.list
- $ hg debugwalk 'listfile:overflow.list' 2>&1 | grep -v '^xxx'
+ $ hg debugwalk 'listfile:overflow.list' 2>&1 | egrep -v '(^matcher: |^xxx)'
f fennel fennel exact
f fenugreek fenugreek exact
$ cd ..
--- a/tests/test-win32text.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-win32text.t Tue Jun 20 16:33:46 2017 -0400
@@ -28,7 +28,7 @@
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cp .hg/hgrc ../zoz/.hg
- $ python unix2dos.py f
+ $ $PYTHON unix2dos.py f
commit should fail
@@ -102,7 +102,7 @@
$ mkdir d
$ echo hello > d/f2
- $ python unix2dos.py d/f2
+ $ $PYTHON unix2dos.py d/f2
$ hg add d/f2
$ hg ci -m 3
attempt to commit or push text file(s) using CRLF line endings
@@ -181,7 +181,7 @@
adding dupe/b (glob)
adding dupe/c (glob)
adding dupe/d (glob)
- $ python unix2dos.py dupe/b dupe/c dupe/d
+ $ $PYTHON unix2dos.py dupe/b dupe/c dupe/d
$ hg -R dupe ci -m a dupe/a
$ hg -R dupe ci -m b/c dupe/[bc]
$ hg -R dupe ci -m d dupe/d
--- a/tests/test-worker.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-worker.t Tue Jun 20 16:33:46 2017 -0400
@@ -2,9 +2,10 @@
$ cat > t.py <<EOF
> from __future__ import absolute_import, print_function
+ > import time
> from mercurial import (
- > cmdutil,
> error,
+ > registrar,
> ui as uimod,
> worker,
> )
@@ -22,13 +23,14 @@
> for arg in args:
> ui.status('run\n')
> yield 1, arg
+ > time.sleep(0.1) # easier to trigger killworkers code path
> functable = {
> 'abort': abort,
> 'exc': exc,
> 'runme': runme,
> }
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> @command('test', [], 'hg test [COST] [FUNC]')
> def t(ui, repo, cost=1.0, func='runme'):
> cost = float(cost)
@@ -74,21 +76,53 @@
Known exception should be caught, but printed if --traceback is enabled
- $ hg --config "extensions.t=$abspath" --config 'worker.numcpus=2' \
- > test 100000.0 abort
+ $ hg --config "extensions.t=$abspath" --config 'worker.numcpus=8' \
+ > test 100000.0 abort 2>&1
start
abort: known exception
[255]
- $ hg --config "extensions.t=$abspath" --config 'worker.numcpus=2' \
- > test 100000.0 abort --traceback 2>&1 | grep '^Traceback'
- Traceback (most recent call last):
- Traceback (most recent call last):
+ $ hg --config "extensions.t=$abspath" --config 'worker.numcpus=8' \
+ > test 100000.0 abort --traceback 2>&1 | egrep '^(SystemExit|Abort)'
+ Abort: known exception
+ SystemExit: 255
Traceback must be printed for unknown exceptions
- $ hg --config "extensions.t=$abspath" --config 'worker.numcpus=2' \
- > test 100000.0 exc 2>&1 | grep '^Traceback'
- Traceback (most recent call last):
+ $ hg --config "extensions.t=$abspath" --config 'worker.numcpus=8' \
+ > test 100000.0 exc 2>&1 | grep '^Exception'
+ Exception: unknown exception
+
+Workers should not do cleanups in all cases
+
+ $ cat > $TESTTMP/detectcleanup.py <<EOF
+ > from __future__ import absolute_import
+ > import atexit
+ > import os
+ > import time
+ > oldfork = os.fork
+ > count = 0
+ > parentpid = os.getpid()
+ > def delayedfork():
+ > global count
+ > count += 1
+ > pid = oldfork()
+ > # make it easier to test SIGTERM hitting other workers when they have
+ > # not set up error handling yet.
+ > if count > 1 and pid == 0:
+ > time.sleep(0.1)
+ > return pid
+ > os.fork = delayedfork
+ > def cleanup():
+ > if os.getpid() != parentpid:
+ > os.write(1, 'should never happen\n')
+ > atexit.register(cleanup)
+ > EOF
+
+ $ hg --config "extensions.t=$abspath" --config worker.numcpus=8 --config \
+ > "extensions.d=$TESTTMP/detectcleanup.py" test 100000 abort
+ start
+ abort: known exception
+ [255]
#endif
--- a/tests/test-xdg.t Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/test-xdg.t Tue Jun 20 16:33:46 2017 -0400
@@ -5,7 +5,7 @@
$ echo 'username = foobar' >> xdgconf/hg/hgrc
$ XDG_CONFIG_HOME="`pwd`/xdgconf" ; export XDG_CONFIG_HOME
$ unset HGRCPATH
- $ hg config ui.username
+ $ hg config ui.username 2>/dev/null
foobar
#endif
--- a/tests/testlib/exchange-obsmarker-util.sh Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/testlib/exchange-obsmarker-util.sh Tue Jun 20 16:33:46 2017 -0400
@@ -32,6 +32,9 @@
# we need to strip some changeset for some test cases
hgext.strip=
+[devel]
+strip-obsmarkers = no
+
[alias]
# fix date used to create obsolete markers.
debugobsolete=debugobsolete -d '0 0'
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/testlib/obsmarker-common.sh Tue Jun 20 16:33:46 2017 -0400
@@ -0,0 +1,14 @@
+mkcommit() {
+ echo "$1" > "$1"
+ hg add "$1"
+ hg ci -m "$1"
+}
+
+getid() {
+ hg log --hidden --template '{node}\n' --rev "$1"
+}
+
+cat >> $HGRCPATH <<EOF
+[alias]
+debugobsolete=debugobsolete -d '0 0'
+EOF
--- a/tests/tinyproxy.py Tue Jun 13 22:24:41 2017 -0400
+++ b/tests/tinyproxy.py Tue Jun 20 16:33:46 2017 -0400
@@ -53,6 +53,9 @@
self.log_message('"%s" %s %s%s',
self.requestline, str(code), str(size),
''.join([' %s:%s' % h for h in sorted(xheaders)]))
+ # Flush for Windows, so output isn't lost on TerminateProcess()
+ sys.stdout.flush()
+ sys.stderr.flush()
def _connect_to(self, netloc, soc):
i = netloc.find(':')