--- a/Makefile Mon Sep 18 10:54:00 2017 -0700
+++ b/Makefile Sat Sep 30 07:52:48 2017 -0700
@@ -132,6 +132,7 @@
mercurial/templater.py \
mercurial/filemerge.py \
mercurial/hgweb/webcommands.py \
+ mercurial/util.py \
$(DOCFILES) > i18n/hg.pot.tmp
# All strings marked for translation in Mercurial contain
# ASCII characters only. But some files contain string
--- a/contrib/buildrpm Mon Sep 18 10:54:00 2017 -0700
+++ b/contrib/buildrpm Sat Sep 30 07:52:48 2017 -0700
@@ -11,6 +11,8 @@
BUILD=1
RPMBUILDDIR="$PWD/rpmbuild"
+export HGPLAIN=
+
while [ "$1" ]; do
case "$1" in
--prepare )
--- a/contrib/check-code.py Mon Sep 18 10:54:00 2017 -0700
+++ b/contrib/check-code.py Sat Sep 30 07:52:48 2017 -0700
@@ -119,7 +119,9 @@
(r'\[[^\]]+==', '[ foo == bar ] is a bashism, use [ foo = bar ] instead'),
(r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
"use egrep for extended grep syntax"),
- (r'/bin/', "don't use explicit paths for tools"),
+ (r'(^|\|\s*)e?grep .*\\S', "don't use \\S in regular expression"),
+ (r'(?<!!)/bin/', "don't use explicit paths for tools"),
+ (r'#!.*/bash', "don't use bash in shebang, use sh"),
(r'[^\n]\Z', "no trailing newline"),
(r'export .*=', "don't export and assign at once"),
(r'^source\b', "don't use 'source', use '.'"),
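
The negative lookbehind added above is what lets shebang lines through while still flagging explicit tool paths. As a quick standalone illustration (not part of check-code.py itself, just the same regexes exercised in isolation):

    import re

    # '(?<!!)/bin/' still flags explicit tool paths, but not when '/bin/'
    # immediately follows '!', so shebang lines are no longer reported.
    assert re.search(r'(?<!!)/bin/', 'test -x /bin/sh')
    assert not re.search(r'(?<!!)/bin/', '#!/bin/sh')

    # The new shebang rule catches bash shebangs separately.
    assert re.search(r'#!.*/bash', '#!/bin/bash')
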
@@ -159,7 +161,7 @@
]
testfilters = [
- (r"( *)(#([^\n]*\S)?)", repcomment),
+ (r"( *)(#([^!][^\n]*\S)?)", repcomment),
(r"<<(\S+)((.|\n)*?\n\1)", rephere),
]
@@ -232,7 +234,7 @@
utestfilters = [
(r"<<(\S+)((.|\n)*?\n > \1)", rephere),
- (r"( +)(#([^\n]*\S)?)", repcomment),
+ (r"( +)(#([^!][^\n]*\S)?)", repcomment),
]
pypats = [
@@ -260,7 +262,7 @@
(r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
# (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=',
# "don't use underbars in identifiers"),
- (r'^\s+(self\.)?[A-za-z][a-z0-9]+[A-Z]\w* = ',
+ (r'^\s+(self\.)?[A-Za-z][a-z0-9]+[A-Z]\w* = ',
"don't use camelcase in identifiers"),
(r'^\s*(if|while|def|class|except|try)\s[^[\n]*:\s*[^\\n]#\s]+',
"linebreak after :"),
--- a/contrib/chg/util.c Mon Sep 18 10:54:00 2017 -0700
+++ b/contrib/chg/util.c Sat Sep 30 07:52:48 2017 -0700
@@ -14,6 +14,7 @@
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
+#include <sys/time.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>
@@ -59,6 +60,13 @@
}
static int debugmsgenabled = 0;
+static double debugstart = 0;
+
+static double now() {
+ struct timeval t;
+ gettimeofday(&t, NULL);
+ return t.tv_usec / 1e6 + t.tv_sec;
+}
void enablecolor(void)
{
@@ -68,6 +76,7 @@
void enabledebugmsg(void)
{
debugmsgenabled = 1;
+ debugstart = now();
}
void debugmsg(const char *fmt, ...)
@@ -78,7 +87,7 @@
va_list args;
va_start(args, fmt);
fsetcolor(stderr, "1;30");
- fputs("chg: debug: ", stderr);
+ fprintf(stderr, "chg: debug: %4.6f ", now() - debugstart);
vfprintf(stderr, fmt, args);
fsetcolor(stderr, "");
fputc('\n', stderr);
--- a/contrib/hg-ssh Mon Sep 18 10:54:00 2017 -0700
+++ b/contrib/hg-ssh Sat Sep 30 07:52:48 2017 -0700
@@ -28,13 +28,19 @@
You can also add a --read-only flag to allow read-only access to a key, e.g.:
command="hg-ssh --read-only repos/*"
"""
+from __future__ import absolute_import
+
+import os
+import shlex
+import sys
# enable importing on demand to reduce startup time
-from mercurial import demandimport; demandimport.enable()
+import hgdemandimport ; hgdemandimport.enable()
-from mercurial import dispatch, ui as uimod
-
-import sys, os, shlex
+from mercurial import (
+ dispatch,
+ ui as uimod,
+)
def main():
cwd = os.getcwd()
--- a/contrib/hgperf Mon Sep 18 10:54:00 2017 -0700
+++ b/contrib/hgperf Sat Sep 30 07:52:48 2017 -0700
@@ -52,18 +52,20 @@
sys.stderr.write("(check your install and PYTHONPATH)\n")
sys.exit(-1)
-import mercurial.util
-import mercurial.dispatch
+from mercurial import (
+ dispatch,
+ util,
+)
def timer(func, title=None):
results = []
- begin = mercurial.util.timer()
+ begin = util.timer()
count = 0
while True:
ostart = os.times()
- cstart = mercurial.util.timer()
+ cstart = util.timer()
r = func()
- cstop = mercurial.util.timer()
+ cstop = util.timer()
ostop = os.times()
count += 1
a, b = ostart, ostop
@@ -80,7 +82,7 @@
sys.stderr.write("! wall %f comb %f user %f sys %f (best of %d)\n"
% (m[0], m[1] + m[2], m[1], m[2], count))
-orgruncommand = mercurial.dispatch.runcommand
+orgruncommand = dispatch.runcommand
def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
ui.pushbuffer()
@@ -90,9 +92,9 @@
ui.popbuffer()
lui.popbuffer()
-mercurial.dispatch.runcommand = runcommand
+dispatch.runcommand = runcommand
for fp in (sys.stdin, sys.stdout, sys.stderr):
- mercurial.util.setbinary(fp)
+ util.setbinary(fp)
-mercurial.dispatch.run()
+dispatch.run()
--- a/contrib/import-checker.py Mon Sep 18 10:54:00 2017 -0700
+++ b/contrib/import-checker.py Sat Sep 30 07:52:48 2017 -0700
@@ -12,12 +12,18 @@
# to work when run from a virtualenv. The modules were chosen empirically
# so that the return value matches the return value without virtualenv.
if True: # disable lexical sorting checks
- import BaseHTTPServer
+ try:
+ import BaseHTTPServer as basehttpserver
+ except ImportError:
+ basehttpserver = None
import zlib
# Whitelist of modules that symbols can be directly imported from.
allowsymbolimports = (
'__future__',
+ 'bzrlib',
+ 'hgclient',
+ 'mercurial',
'mercurial.hgweb.common',
'mercurial.hgweb.request',
'mercurial.i18n',
@@ -144,6 +150,8 @@
>>> fromlocal2('bar', 2)
('foo.bar', 'foo.bar.__init__', True)
"""
+ if not isinstance(modulename, str):
+ modulename = modulename.decode('ascii')
prefix = '.'.join(modulename.split('.')[:-1])
if prefix:
prefix += '.'
@@ -183,8 +191,9 @@
def list_stdlib_modules():
"""List the modules present in the stdlib.
+ >>> py3 = sys.version_info[0] >= 3
>>> mods = set(list_stdlib_modules())
- >>> 'BaseHTTPServer' in mods
+ >>> 'BaseHTTPServer' in mods or py3
True
os.path isn't really a module, so it's missing:
@@ -201,7 +210,7 @@
>>> 'collections' in mods
True
- >>> 'cStringIO' in mods
+ >>> 'cStringIO' in mods or py3
True
>>> 'cffi' in mods
@@ -213,7 +222,11 @@
# consider them stdlib.
for m in ['msvcrt', '_winreg']:
yield m
+ yield '__builtin__'
yield 'builtins' # python3 only
+ yield 'importlib.abc' # python3 only
+ yield 'importlib.machinery' # python3 only
+ yield 'importlib.util' # python3 only
for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only
yield m
for m in 'cPickle', 'datetime': # in Python (not C) on PyPy
@@ -223,7 +236,9 @@
stdlib_prefixes = {sys.prefix, sys.exec_prefix}
# We need to supplement the list of prefixes for the search to work
# when run from within a virtualenv.
- for mod in (BaseHTTPServer, zlib):
+ for mod in (basehttpserver, zlib):
+ if mod is None:
+ continue
try:
# Not all module objects have a __file__ attribute.
filename = mod.__file__
@@ -396,10 +411,13 @@
assign the symbol to a module-level variable. In addition, these imports
must be performed before other local imports. This rule only
applies to import statements outside of any blocks.
- * Relative imports from the standard library are not allowed.
+ * Relative imports from the standard library are not allowed, unless that
+ library is also a local module.
* Certain modules must be aliased to alternate names to avoid aliasing
and readability problems. See `requirealias`.
"""
+ if not isinstance(module, str):
+ module = module.decode('ascii')
topmodule = module.split('.')[0]
fromlocal = fromlocalfunc(module, localmods)
@@ -476,7 +494,10 @@
# __future__ is special since it needs to come first and use
# symbol import.
if fullname != '__future__':
- if not fullname or fullname in stdlib_modules:
+ if not fullname or (
+ fullname in stdlib_modules
+ and fullname not in localmods
+ and fullname + '.__init__' not in localmods):
yield msg('relative import of stdlib module')
else:
seenlocal = fullname
@@ -610,22 +631,26 @@
def embedded(f, modname, src):
"""Extract embedded python code
+ >>> def _forcestr(thing):
+ ... if not isinstance(thing, str):
+ ... return thing.decode('ascii')
+ ... return thing
>>> def test(fn, lines):
- ... for s, m, f, l in embedded(fn, "example", lines):
- ... print("%s %s %s" % (m, f, l))
- ... print(repr(s))
+ ... for s, m, f, l in embedded(fn, b"example", lines):
+ ... print("%s %s %d" % (_forcestr(m), _forcestr(f), l))
+ ... print(repr(_forcestr(s)))
>>> lines = [
- ... 'comment',
- ... ' >>> from __future__ import print_function',
- ... " >>> ' multiline",
- ... " ... string'",
- ... ' ',
- ... 'comment',
- ... ' $ cat > foo.py <<EOF',
- ... ' > from __future__ import print_function',
- ... ' > EOF',
+ ... b'comment',
+ ... b' >>> from __future__ import print_function',
+ ... b" >>> ' multiline",
+ ... b" ... string'",
+ ... b' ',
+ ... b'comment',
+ ... b' $ cat > foo.py <<EOF',
+ ... b' > from __future__ import print_function',
+ ... b' > EOF',
... ]
- >>> test("example.t", lines)
+ >>> test(b"example.t", lines)
example[2] doctest.py 2
"from __future__ import print_function\\n' multiline\\nstring'\\n"
example[7] foo.py 7
@@ -647,16 +672,16 @@
if not inlinepython:
# We've just entered a Python block.
inlinepython = n
- t = 'doctest.py'
+ t = b'doctest.py'
script.append(l[prefix:])
continue
if l.startswith(b' ... '): # python inlines
script.append(l[prefix:])
continue
- cat = re.search(r"\$ \s*cat\s*>\s*(\S+\.py)\s*<<\s*EOF", l)
+ cat = re.search(br"\$ \s*cat\s*>\s*(\S+\.py)\s*<<\s*EOF", l)
if cat:
if inlinepython:
- yield ''.join(script), ("%s[%d]" %
+ yield b''.join(script), (b"%s[%d]" %
(modname, inlinepython)), t, inlinepython
script = []
inlinepython = 0
@@ -665,15 +690,18 @@
continue
if shpython and l.startswith(b' > '): # sh continuation
if l == b' > EOF\n':
- yield ''.join(script), ("%s[%d]" %
+ yield b''.join(script), (b"%s[%d]" %
(modname, shpython)), t, shpython
script = []
shpython = 0
else:
script.append(l[4:])
continue
- if inlinepython and l == b' \n':
- yield ''.join(script), ("%s[%d]" %
+ # If we have an empty line or a command for sh, we end the
+ # inline script.
+ if inlinepython and (l == b' \n'
+ or l.startswith(b' $ ')):
+ yield b''.join(script), (b"%s[%d]" %
(modname, inlinepython)), t, inlinepython
script = []
inlinepython = 0
@@ -691,11 +719,11 @@
"""
py = False
if not f.endswith('.t'):
- with open(f) as src:
+ with open(f, 'rb') as src:
yield src.read(), modname, f, 0
py = True
if py or f.endswith('.t'):
- with open(f) as src:
+ with open(f, 'rb') as src:
for script, modname, t, line in embedded(f, modname, src):
yield script, modname, t, line
@@ -714,6 +742,9 @@
localmodpaths[modname] = source_path
localmods = populateextmods(localmodpaths)
for localmodname, source_path in sorted(localmodpaths.items()):
+ if not isinstance(localmodname, bytes):
+ # This is only safe because all hg's files are ascii
+ localmodname = localmodname.encode('ascii')
for src, modname, name, line in sources(source_path, localmodname):
try:
used_imports[modname] = sorted(
--- a/contrib/mercurial.spec Mon Sep 18 10:54:00 2017 -0700
+++ b/contrib/mercurial.spec Sat Sep 30 07:52:48 2017 -0700
@@ -83,6 +83,7 @@
%endif
make all
+make -C contrib/chg
%install
rm -rf $RPM_BUILD_ROOT
@@ -111,6 +112,7 @@
%endif
+install -m 755 contrib/chg/chg $RPM_BUILD_ROOT%{_bindir}/
install -m 755 contrib/hgk $RPM_BUILD_ROOT%{_bindir}/
install -m 755 contrib/hg-ssh $RPM_BUILD_ROOT%{_bindir}/
@@ -143,6 +145,7 @@
%{_datadir}/emacs/site-lisp/mercurial.el
%{_datadir}/emacs/site-lisp/mq.el
%{_bindir}/hg
+%{_bindir}/chg
%{_bindir}/hgk
%{_bindir}/hg-ssh
%dir %{_sysconfdir}/bash_completion.d/
--- a/contrib/perf.py Mon Sep 18 10:54:00 2017 -0700
+++ b/contrib/perf.py Sat Sep 30 07:52:48 2017 -0700
@@ -370,15 +370,9 @@
@command('perfwalk', formatteropts)
def perfwalk(ui, repo, *pats, **opts):
timer, fm = gettimer(ui, opts)
- try:
- m = scmutil.match(repo[None], pats, {})
- timer(lambda: len(list(repo.dirstate.walk(m, [], True, False))))
- except Exception:
- try:
- m = scmutil.match(repo[None], pats, {})
- timer(lambda: len([b for a, b, c in repo.dirstate.statwalk([], m)]))
- except Exception:
- timer(lambda: len(list(cmdutil.walk(repo, pats, {}))))
+ m = scmutil.match(repo[None], pats, {})
+ timer(lambda: len(list(repo.dirstate.walk(m, subrepos=[], unknown=True,
+ ignored=False))))
fm.end()
@command('perfannotate', formatteropts)
--- a/contrib/phabricator.py Mon Sep 18 10:54:00 2017 -0700
+++ b/contrib/phabricator.py Sat Sep 30 07:52:48 2017 -0700
@@ -7,9 +7,9 @@
"""simple Phabricator integration
This extension provides a ``phabsend`` command which sends a stack of
-changesets to Phabricator without amending commit messages, and a ``phabread``
-command which prints a stack of revisions in a format suitable
-for :hg:`import`.
+changesets to Phabricator, a ``phabread`` command which prints a stack of
+revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
+to update statuses in batch.
By default, Phabricator requires a ``Test Plan`` which might prevent some
changesets from being sent. The requirement could be disabled by changing
@@ -28,23 +28,34 @@
# callsign is "FOO".
callsign = FOO
+ # curl command to use. If not set (default), use the builtin HTTP library to
+ # communicate. If set, use the specified curl command. This could be useful
+ # if you need to specify advanced options that are not easily supported by
+ # the internal library.
+ curlcmd = curl --connect-timeout 2 --retry 3 --silent
"""
from __future__ import absolute_import
+import itertools
import json
+import operator
import re
from mercurial.node import bin, nullid
from mercurial.i18n import _
from mercurial import (
+ cmdutil,
+ context,
encoding,
error,
mdiff,
- obsolete,
+ obsutil,
+ parser,
patch,
registrar,
scmutil,
+ smartset,
tags,
url as urlmod,
util,
@@ -53,6 +64,15 @@
cmdtable = {}
command = registrar.command(cmdtable)
+colortable = {
+ 'phabricator.action.created': 'green',
+ 'phabricator.action.skipped': 'magenta',
+ 'phabricator.action.updated': 'magenta',
+ 'phabricator.desc': '',
+ 'phabricator.drev': 'bold',
+ 'phabricator.node': '',
+}
+
def urlencodenested(params):
"""like urlencode, but works with nested parameters.
@@ -93,12 +113,20 @@
"""call Conduit API, params is a dict. return json.loads result, or None"""
host, token = readurltoken(repo)
url, authinfo = util.url('/'.join([host, 'api', name])).authinfo()
- urlopener = urlmod.opener(repo.ui, authinfo)
repo.ui.debug('Conduit Call: %s %s\n' % (url, params))
params = params.copy()
params['api.token'] = token
- request = util.urlreq.request(url, data=urlencodenested(params))
- body = urlopener.open(request).read()
+ data = urlencodenested(params)
+ curlcmd = repo.ui.config('phabricator', 'curlcmd')
+ if curlcmd:
+ sin, sout = util.popen2('%s -d @- %s' % (curlcmd, util.shellquote(url)))
+ sin.write(data)
+ sin.close()
+ body = sout.read()
+ else:
+ urlopener = urlmod.opener(repo.ui, authinfo)
+ request = util.urlreq.request(url, data=data)
+ body = urlopener.open(request).read()
repo.ui.debug('Conduit Response: %s\n' % body)
parsed = json.loads(body)
if parsed.get(r'error_code'):
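
For readers unfamiliar with the ``-d @-`` idiom used above: the encoded parameters are written to curl's stdin and the JSON response is read back from its stdout. A minimal standalone sketch of that transport pattern (illustrative names only; the extension itself goes through util.popen2 and urlencodenested) might look like:

    import subprocess
    import urllib.parse

    def curlpost(curlcmd, url, params):
        # Pipe the urlencoded body through the configured curl command's
        # stdin ('-d @-') and return whatever the server sent back.
        data = urllib.parse.urlencode(params).encode('ascii')
        proc = subprocess.Popen(curlcmd.split() + ['-d', '@-', url],
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE)
        body, _ = proc.communicate(data)
        return body
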
@@ -138,70 +166,84 @@
_differentialrevisiontagre = re.compile('\AD([1-9][0-9]*)\Z')
_differentialrevisiondescre = re.compile(
- '^Differential Revision:\s*(.*)D([1-9][0-9]*)$', re.M)
+ '^Differential Revision:\s*(?:.*)D([1-9][0-9]*)$', re.M)
def getoldnodedrevmap(repo, nodelist):
"""find previous nodes that has been sent to Phabricator
- return {node: (oldnode or None, Differential Revision ID)}
+ return {node: (oldnode, Differential diff, Differential Revision ID)}
for node in nodelist with known previous sent versions, or associated
- Differential Revision IDs.
+ Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
+ be ``None``.
- Examines all precursors and their tags. Tags with format like "D1234" are
- considered a match and the node with that tag, and the number after "D"
- (ex. 1234) will be returned.
+ Examines commit messages like "Differential Revision:" to get the
+ association information.
- If tags are not found, examine commit message. The "Differential Revision:"
- line could associate this changeset to a Differential Revision.
+ If no such commit message line is found, examines all precursors and their
+ tags. Tags with a format like "D1234" are considered a match; the node with
+ that tag, and the number after "D" (e.g. 1234), will be returned.
+
+ The ``oldnode``, if not None, is guaranteed to be the last diff of the
+ corresponding Differential Revision, and to exist in the repo.
"""
url, token = readurltoken(repo)
unfi = repo.unfiltered()
nodemap = unfi.changelog.nodemap
- result = {} # {node: (oldnode or None, drev)}
- toconfirm = {} # {node: (oldnode, {precnode}, drev)}
+ result = {} # {node: (oldnode?, lastdiff?, drev)}
+ toconfirm = {} # {node: (force, {precnode}, drev)}
for node in nodelist:
ctx = unfi[node]
# For tags like "D123", put them into "toconfirm" to verify later
- precnodes = list(obsolete.allprecursors(unfi.obsstore, [node]))
+ precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
for n in precnodes:
if n in nodemap:
for tag in unfi.nodetags(n):
m = _differentialrevisiontagre.match(tag)
if m:
- toconfirm[node] = (n, set(precnodes), int(m.group(1)))
+ toconfirm[node] = (0, set(precnodes), int(m.group(1)))
continue
- # Check commit message (make sure URL matches)
+ # Check commit message
m = _differentialrevisiondescre.search(ctx.description())
if m:
- if m.group(1).rstrip('/') == url.rstrip('/'):
- result[node] = (None, int(m.group(2)))
- else:
- unfi.ui.warn(_('%s: Differential Revision URL ignored - host '
- 'does not match config\n') % ctx)
+ toconfirm[node] = (1, set(precnodes), int(m.group(1)))
# Double check if tags are genuine by collecting all old nodes from
# Phabricator, and expect precursors overlap with it.
if toconfirm:
- confirmed = {} # {drev: {oldnode}}
- drevs = [drev for n, precs, drev in toconfirm.values()]
- diffs = callconduit(unfi, 'differential.querydiffs',
- {'revisionIDs': drevs})
- for diff in diffs.values():
- drev = int(diff[r'revisionID'])
- oldnode = bin(encoding.unitolocal(getdiffmeta(diff).get(r'node')))
- if node:
- confirmed.setdefault(drev, set()).add(oldnode)
- for newnode, (oldnode, precset, drev) in toconfirm.items():
- if bool(precset & confirmed.get(drev, set())):
- result[newnode] = (oldnode, drev)
- else:
+ drevs = [drev for force, precs, drev in toconfirm.values()]
+ alldiffs = callconduit(unfi, 'differential.querydiffs',
+ {'revisionIDs': drevs})
+ getnode = lambda d: bin(encoding.unitolocal(
+ getdiffmeta(d).get(r'node', ''))) or None
+ for newnode, (force, precset, drev) in toconfirm.items():
+ diffs = [d for d in alldiffs.values()
+ if int(d[r'revisionID']) == drev]
+
+ # "precursors" as known by Phabricator
+ phprecset = set(getnode(d) for d in diffs)
+
+ # Ignore if precursors (Phabricator and local repo) do not overlap,
+ # and force is not set (when commit message says nothing)
+ if not force and not bool(phprecset & precset):
tagname = 'D%d' % drev
tags.tag(repo, tagname, nullid, message=None, user=None,
date=None, local=True)
unfi.ui.warn(_('D%s: local tag removed - does not match '
'Differential history\n') % drev)
+ continue
+
+ # Find the last node using Phabricator metadata, and make sure it
+ # exists in the repo
+ oldnode = lastdiff = None
+ if diffs:
+ lastdiff = max(diffs, key=lambda d: int(d[r'id']))
+ oldnode = getnode(lastdiff)
+ if oldnode and oldnode not in nodemap:
+ oldnode = None
+
+ result[newnode] = (oldnode, lastdiff, drev)
return result
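
The loosened ``Differential Revision`` regex above no longer compares the host part of the URL; it only captures the revision number. A quick standalone check of that behaviour, using a hypothetical commit message:

    import re

    # Same shape as _differentialrevisiondescre: group 1 is the drev number,
    # regardless of which Phabricator host appears in the URL.
    descre = re.compile(r'^Differential Revision:\s*(?:.*)D([1-9][0-9]*)$',
                        re.M)
    msg = ('phabricator: example commit\n\n'
           'Differential Revision: https://phab.example.com/D1234')
    print(int(descre.search(msg).group(1)))  # 1234
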
@@ -241,7 +283,7 @@
callconduit(ctx.repo(), 'differential.setdiffproperty', params)
def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
- actions=None):
+ olddiff=None, actions=None):
"""create or update a Differential Revision
If revid is None, create a new Differential Revision, otherwise update
@@ -254,7 +296,7 @@
"""
repo = ctx.repo()
if oldnode:
- diffopts = mdiff.diffopts(git=True, context=1)
+ diffopts = mdiff.diffopts(git=True, context=32767)
oldctx = repo.unfiltered()[oldnode]
neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
else:
@@ -263,8 +305,14 @@
transactions = []
if neednewdiff:
diff = creatediff(ctx)
- writediffproperties(ctx, diff)
transactions.append({'type': 'update', 'value': diff[r'phid']})
+ else:
+ # Even if we don't need to upload a new diff because the patch content
+ # does not change, we might still need to update its metadata so
+ # pushers can see the correct node metadata.
+ assert olddiff
+ diff = olddiff
+ writediffproperties(ctx, diff)
# Use a temporary summary to set dependency. There might be better ways but
# I cannot find them for now. But do not do that if we are updating an
@@ -295,7 +343,7 @@
if not revision:
raise error.Abort(_('cannot create revision for %s') % ctx)
- return revision
+ return revision, diff
def userphids(repo, names):
"""convert user names to PHIDs"""
@@ -313,7 +361,9 @@
@command('phabsend',
[('r', 'rev', [], _('revisions to send'), _('REV')),
- ('', 'reviewer', [], _('specify reviewers'))],
+ ('', 'amend', True, _('update commit messages')),
+ ('', 'reviewer', [], _('specify reviewers')),
+ ('', 'confirm', None, _('ask for confirmation before sending'))],
_('REV [OPTIONS]'))
def phabsend(ui, repo, *revs, **opts):
"""upload changesets to Phabricator
@@ -326,12 +376,39 @@
maintain the association. After the first time, phabsend will check
obsstore and tags information so it can figure out whether to update an
existing Differential Revision, or create a new one.
+
+ If --amend is set, update commit messages so they have the
+ ``Differential Revision`` URL, and remove the related local tags. This is
+ similar to what arcanist does, and is preferred in author-push workflows.
+ Otherwise, use local tags to record the ``Differential Revision``
+ association.
+
+ The --confirm option lets you confirm changesets before sending them. You
+ can also add the following to your configuration file to make it the
+ default behaviour::
+
+ [phabsend]
+ confirm = true
+
+ phabsend will check obsstore and the above association to decide whether to
+ update an existing Differential Revision, or create a new one.
"""
revs = list(revs) + opts.get('rev', [])
revs = scmutil.revrange(repo, revs)
if not revs:
raise error.Abort(_('phabsend requires at least one changeset'))
+ if opts.get('amend'):
+ cmdutil.checkunfinished(repo)
+
+ # {newnode: (oldnode, olddiff, olddrev}
+ oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
+
+ confirm = ui.configbool('phabsend', 'confirm')
+ confirm |= bool(opts.get('confirm'))
+ if confirm:
+ confirmed = _confirmbeforesend(repo, revs, oldmap)
+ if not confirmed:
+ raise error.Abort(_('phabsend cancelled'))
actions = []
reviewers = opts.get('reviewer', [])
@@ -339,7 +416,8 @@
phids = userphids(repo, reviewers)
actions.append({'type': 'reviewers.add', 'value': phids})
- oldnodedrev = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
+ drevids = [] # [int]
+ diffmap = {} # {newnode: diff}
# Send patches one by one so we know their Differential Revision IDs and
# can provide dependency relationship
@@ -349,41 +427,183 @@
ctx = repo[rev]
# Get Differential Revision ID
- oldnode, revid = oldnodedrev.get(ctx.node(), (None, None))
- if oldnode != ctx.node():
+ oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
+ if oldnode != ctx.node() or opts.get('amend'):
# Create or update Differential Revision
- revision = createdifferentialrevision(ctx, revid, lastrevid,
- oldnode, actions)
+ revision, diff = createdifferentialrevision(
+ ctx, revid, lastrevid, oldnode, olddiff, actions)
+ diffmap[ctx.node()] = diff
newrevid = int(revision[r'object'][r'id'])
if revid:
- action = _('updated')
+ action = 'updated'
else:
- action = _('created')
+ action = 'created'
- # Create a local tag to note the association
- tagname = 'D%d' % newrevid
- tags.tag(repo, tagname, ctx.node(), message=None, user=None,
- date=None, local=True)
+ # Create a local tag to note the association, if the commit message
+ # does not have it already
+ m = _differentialrevisiondescre.search(ctx.description())
+ if not m or int(m.group(1)) != newrevid:
+ tagname = 'D%d' % newrevid
+ tags.tag(repo, tagname, ctx.node(), message=None, user=None,
+ date=None, local=True)
else:
# Nothing changed. But still set "newrevid" so the next revision
# could depend on this one.
newrevid = revid
- action = _('skipped')
+ action = 'skipped'
+
+ actiondesc = ui.label(
+ {'created': _('created'),
+ 'skipped': _('skipped'),
+ 'updated': _('updated')}[action],
+ 'phabricator.action.%s' % action)
+ drevdesc = ui.label('D%s' % newrevid, 'phabricator.drev')
+ nodedesc = ui.label(bytes(ctx), 'phabricator.node')
+ desc = ui.label(ctx.description().split('\n')[0], 'phabricator.desc')
+ ui.write(_('%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
+ desc))
+ drevids.append(newrevid)
+ lastrevid = newrevid
- ui.write(_('D%s: %s - %s: %s\n') % (newrevid, action, ctx,
- ctx.description().split('\n')[0]))
- lastrevid = newrevid
+ # Update commit messages and remove tags
+ if opts.get('amend'):
+ unfi = repo.unfiltered()
+ drevs = callconduit(repo, 'differential.query', {'ids': drevids})
+ with repo.wlock(), repo.lock(), repo.transaction('phabsend'):
+ wnode = unfi['.'].node()
+ mapping = {} # {oldnode: [newnode]}
+ for i, rev in enumerate(revs):
+ old = unfi[rev]
+ drevid = drevids[i]
+ drev = [d for d in drevs if int(d[r'id']) == drevid][0]
+ newdesc = getdescfromdrev(drev)
+ # Make sure the commit message contains "Differential Revision"
+ if old.description() != newdesc:
+ parents = [
+ mapping.get(old.p1().node(), (old.p1(),))[0],
+ mapping.get(old.p2().node(), (old.p2(),))[0],
+ ]
+ new = context.metadataonlyctx(
+ repo, old, parents=parents, text=newdesc,
+ user=old.user(), date=old.date(), extra=old.extra())
+ newnode = new.commit()
+ mapping[old.node()] = [newnode]
+ # Update diff property
+ writediffproperties(unfi[newnode], diffmap[old.node()])
+ # Remove the local tag since it's no longer necessary
+ tagname = 'D%d' % drevid
+ if tagname in repo.tags():
+ tags.tag(repo, tagname, nullid, message=None, user=None,
+ date=None, local=True)
+ scmutil.cleanupnodes(repo, mapping, 'phabsend')
+ if wnode in mapping:
+ unfi.setparents(mapping[wnode][0])
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output.
_metanamemap = util.sortdict([(r'user', 'User'), (r'date', 'Date'),
(r'node', 'Node ID'), (r'parent', 'Parent ')])
-def querydrev(repo, params, stack=False):
+def _confirmbeforesend(repo, revs, oldmap):
+ url, token = readurltoken(repo)
+ ui = repo.ui
+ for rev in revs:
+ ctx = repo[rev]
+ desc = ctx.description().splitlines()[0]
+ oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
+ if drevid:
+ drevdesc = ui.label('D%s' % drevid, 'phabricator.drev')
+ else:
+ drevdesc = ui.label(_('NEW'), 'phabricator.drev')
+
+ ui.write(_('%s - %s: %s\n') % (drevdesc,
+ ui.label(bytes(ctx), 'phabricator.node'),
+ ui.label(desc, 'phabricator.desc')))
+
+ if ui.promptchoice(_('Send the above changes to %s (yn)?'
+ '$$ &Yes $$ &No') % url):
+ return False
+
+ return True
+
+_knownstatusnames = {'accepted', 'needsreview', 'needsrevision', 'closed',
+ 'abandoned'}
+
+def _getstatusname(drev):
+ """get normalized status name from a Differential Revision"""
+ return drev[r'statusName'].replace(' ', '').lower()
+
+# Small language to specify differential revisions. Supported symbols: (), :X,
+# +, -, and &.
+
+_elements = {
+ # token-type: binding-strength, primary, prefix, infix, suffix
+ '(': (12, None, ('group', 1, ')'), None, None),
+ ':': (8, None, ('ancestors', 8), None, None),
+ '&': (5, None, None, ('and_', 5), None),
+ '+': (4, None, None, ('add', 4), None),
+ '-': (4, None, None, ('sub', 4), None),
+ ')': (0, None, None, None, None),
+ 'symbol': (0, 'symbol', None, None, None),
+ 'end': (0, None, None, None, None),
+}
+
+def _tokenize(text):
+ view = memoryview(text) # zero-copy slice
+ special = '():+-& '
+ pos = 0
+ length = len(text)
+ while pos < length:
+ symbol = ''.join(itertools.takewhile(lambda ch: ch not in special,
+ view[pos:]))
+ if symbol:
+ yield ('symbol', symbol, pos)
+ pos += len(symbol)
+ else: # special char, ignore space
+ if text[pos] != ' ':
+ yield (text[pos], None, pos)
+ pos += 1
+ yield ('end', None, pos)
+
+def _parse(text):
+ tree, pos = parser.parser(_elements).parse(_tokenize(text))
+ if pos != len(text):
+ raise error.ParseError('invalid token', pos)
+ return tree
+
+def _parsedrev(symbol):
+ """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
+ if symbol.startswith('D') and symbol[1:].isdigit():
+ return int(symbol[1:])
+ if symbol.isdigit():
+ return int(symbol)
+
+def _prefetchdrevs(tree):
+ """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
+ drevs = set()
+ ancestordrevs = set()
+ op = tree[0]
+ if op == 'symbol':
+ r = _parsedrev(tree[1])
+ if r:
+ drevs.add(r)
+ elif op == 'ancestors':
+ r, a = _prefetchdrevs(tree[1])
+ drevs.update(r)
+ ancestordrevs.update(r)
+ ancestordrevs.update(a)
+ else:
+ for t in tree[1:]:
+ r, a = _prefetchdrevs(t)
+ drevs.update(r)
+ ancestordrevs.update(a)
+ return drevs, ancestordrevs
+
+def querydrev(repo, spec):
"""return a list of "Differential Revision" dicts
- params is the input of "differential.query" API, and is expected to match
- just a single Differential Revision.
+ spec is a string using a simple query language; see the phabread docstring
+ for details.
A "Differential Revision dict" looks like:
@@ -420,26 +640,13 @@
"repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
"sourcePath": null
}
-
- If stack is True, return a list of "Differential Revision dict"s in an
- order that the latter ones depend on the former ones. Otherwise, return a
- list of a unique "Differential Revision dict".
"""
- prefetched = {} # {id or phid: drev}
def fetch(params):
"""params -> single drev or None"""
key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
if key in prefetched:
return prefetched[key]
- # Otherwise, send the request. If we're fetching a stack, be smarter
- # and fetch more ids in one batch, even if it could be unnecessary.
- batchparams = params
- if stack and len(params.get(r'ids', [])) == 1:
- i = int(params[r'ids'][0])
- # developer config: phabricator.batchsize
- batchsize = repo.ui.configint('phabricator', 'batchsize', 12)
- batchparams = {'ids': range(max(1, i - batchsize), i + 1)}
- drevs = callconduit(repo, 'differential.query', batchparams)
+ drevs = callconduit(repo, 'differential.query', params)
# Fill prefetched with the result
for drev in drevs:
prefetched[drev[r'phid']] = drev
@@ -448,23 +655,66 @@
raise error.Abort(_('cannot get Differential Revision %r') % params)
return prefetched[key]
- visited = set()
- result = []
- queue = [params]
- while queue:
- params = queue.pop()
- drev = fetch(params)
- if drev[r'id'] in visited:
- continue
- visited.add(drev[r'id'])
- result.append(drev)
- if stack:
+ def getstack(topdrevids):
+ """given a top, get a stack from the bottom, [id] -> [id]"""
+ visited = set()
+ result = []
+ queue = [{r'ids': [i]} for i in topdrevids]
+ while queue:
+ params = queue.pop()
+ drev = fetch(params)
+ if drev[r'id'] in visited:
+ continue
+ visited.add(drev[r'id'])
+ result.append(int(drev[r'id']))
auxiliary = drev.get(r'auxiliary', {})
depends = auxiliary.get(r'phabricator:depends-on', [])
for phid in depends:
queue.append({'phids': [phid]})
- result.reverse()
- return result
+ result.reverse()
+ return smartset.baseset(result)
+
+ # Initialize prefetch cache
+ prefetched = {} # {id or phid: drev}
+
+ tree = _parse(spec)
+ drevs, ancestordrevs = _prefetchdrevs(tree)
+
+ # developer config: phabricator.batchsize
+ batchsize = repo.ui.configint('phabricator', 'batchsize', 12)
+
+ # Prefetch Differential Revisions in batch
+ tofetch = set(drevs)
+ for r in ancestordrevs:
+ tofetch.update(range(max(1, r - batchsize), r + 1))
+ if drevs:
+ fetch({r'ids': list(tofetch)})
+ validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
+
+ # Walk through the tree, return smartsets
+ def walk(tree):
+ op = tree[0]
+ if op == 'symbol':
+ drev = _parsedrev(tree[1])
+ if drev:
+ return smartset.baseset([drev])
+ elif tree[1] in _knownstatusnames:
+ drevs = [r for r in validids
+ if _getstatusname(prefetched[r]) == tree[1]]
+ return smartset.baseset(drevs)
+ else:
+ raise error.Abort(_('unknown symbol: %s') % tree[1])
+ elif op in {'and_', 'add', 'sub'}:
+ assert len(tree) == 3
+ return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
+ elif op == 'group':
+ return walk(tree[1])
+ elif op == 'ancestors':
+ return getstack(walk(tree[1]))
+ else:
+ raise error.ProgrammingError('illegal tree: %r' % tree)
+
+ return [prefetched[r] for r in walk(tree)]
def getdescfromdrev(drev):
"""get description (commit message) from "Differential Revision"
@@ -530,15 +780,12 @@
meta[r'parent'] = commit[r'parents'][0]
return meta or {}
-def readpatch(repo, params, write, stack=False):
+def readpatch(repo, drevs, write):
"""generate plain-text patch readable by 'hg import'
- write is usually ui.write. params is passed to "differential.query". If
- stack is True, also write dependent patches.
+ write is usually ui.write. drevs is what "querydrev" returns, results of
+ "differential.query".
"""
- # Differential Revisions
- drevs = querydrev(repo, params, stack)
-
# Prefetch hg:meta property for all diffs
diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
diffs = callconduit(repo, 'differential.querydiffs', {'ids': diffids})
@@ -565,17 +812,56 @@
@command('phabread',
[('', 'stack', False, _('read dependencies'))],
- _('REVID [OPTIONS]'))
-def phabread(ui, repo, revid, **opts):
+ _('DREVSPEC [OPTIONS]'))
+def phabread(ui, repo, spec, **opts):
"""print patches from Phabricator suitable for importing
- REVID could be a Differential Revision identity, like ``D123``, or just the
- number ``123``, or a full URL like ``https://phab.example.com/D123``.
+ DREVSPEC could be a Differential Revision identity, like ``D123``, or just
+ the number ``123``. It can also use operators like ``+``, ``-``, ``&``,
+ ``(``, ``)`` for complex queries. The prefix ``:`` can be used to select a
+ stack.
+
+ ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
+ could be used to filter patches by status. For performance reasons, they
+ only represent a subset of non-status selections and cannot be used alone.
+
+ For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, excluding
+ D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
+ stack up to D9.
If --stack is given, follow dependency information and read all patches.
+ It is equivalent to the ``:`` operator.
"""
- try:
- revid = int(revid.split('/')[-1].replace('D', ''))
- except ValueError:
- raise error.Abort(_('invalid Revision ID: %s') % revid)
- readpatch(repo, {'ids': [revid]}, ui.write, opts.get('stack'))
+ if opts.get('stack'):
+ spec = ':(%s)' % spec
+ drevs = querydrev(repo, spec)
+ readpatch(repo, drevs, ui.write)
+
+@command('phabupdate',
+ [('', 'accept', False, _('accept revisions')),
+ ('', 'reject', False, _('reject revisions')),
+ ('', 'abandon', False, _('abandon revisions')),
+ ('', 'reclaim', False, _('reclaim revisions')),
+ ('m', 'comment', '', _('comment on the last revision')),
+ ], _('DREVSPEC [OPTIONS]'))
+def phabupdate(ui, repo, spec, **opts):
+ """update Differential Revision in batch
+
+ DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
+ """
+ flags = [n for n in 'accept reject abandon reclaim'.split() if opts.get(n)]
+ if len(flags) > 1:
+ raise error.Abort(_('%s cannot be used together') % ', '.join(flags))
+
+ actions = []
+ for f in flags:
+ actions.append({'type': f, 'value': 'true'})
+
+ drevs = querydrev(repo, spec)
+ for i, drev in enumerate(drevs):
+ if i + 1 == len(drevs) and opts.get('comment'):
+ actions.append({'type': 'comment', 'value': opts['comment']})
+ if actions:
+ params = {'objectIdentifier': drev[r'phid'],
+ 'transactions': actions}
+ callconduit(repo, 'differential.revision.edit', params)
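
To make the DREVSPEC mini-language above easier to follow, here is a stripped-down tokenizer in the same spirit (standalone and simplified; the real _tokenize also yields byte positions and works on a memoryview):

    def tokenize(text):
        # Symbols are maximal runs of characters outside the special set;
        # every special character except space is emitted as its own token.
        special = '():+-& '
        pos = 0
        while pos < len(text):
            if text[pos] in special:
                if text[pos] != ' ':
                    yield (text[pos], None)
                pos += 1
            else:
                end = pos
                while end < len(text) and text[end] not in special:
                    end += 1
                yield ('symbol', text[pos:end])
                pos = end
        yield ('end', None)

    print(list(tokenize(':D6+8-(2+D4)')))
    # [(':', None), ('symbol', 'D6'), ('+', None), ('symbol', '8'),
    #  ('-', None), ('(', None), ('symbol', '2'), ('+', None),
    #  ('symbol', 'D4'), (')', None), ('end', None)]
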
--- a/contrib/python3-whitelist Mon Sep 18 10:54:00 2017 -0700
+++ b/contrib/python3-whitelist Sat Sep 30 07:52:48 2017 -0700
@@ -1,33 +1,75 @@
+test-addremove.t
test-ancestor.py
test-backwards-remove.t
+test-bheads.t
+test-bisect2.t
+test-bookmarks-merge.t
+test-bookmarks-strip.t
test-branch-tag-confict.t
test-casecollision.t
+test-changelog-exec.t
test-check-commit.t
test-check-execute.t
+test-check-module-imports.t
test-check-pyflakes.t
test-check-pylint.t
test-check-shbang.t
+test-commit-unresolved.t
test-contrib-check-code.t
test-contrib-check-commit.t
+test-debugrename.t
+test-diff-copy-depth.t
+test-diff-hashes.t
test-diff-issue2761.t
test-diff-newlines.t
test-diff-reverse.t
test-diff-subdir.t
test-dirstate-nonnormalset.t
test-doctest.py
+test-double-merge.t
+test-duplicateoptions.py
test-empty-dir.t
+test-empty-file.t
+test-empty.t
+test-encoding-func.py
test-excessive-merge.t
+test-hghave.t
+test-imports-checker.t
test-issue1089.t
+test-issue1877.t
test-issue1993.t
+test-issue612.t
+test-issue619.t
+test-issue672.t
test-issue842.t
+test-journal-exists.t
test-locate.t
test-lrucachedict.py
test-manifest.py
+test-match.py
test-merge-default.t
test-merge2.t
+test-merge4.t
test-merge5.t
+test-permissions.t
+test-push-checkheads-pruned-B1.t
+test-push-checkheads-pruned-B6.t
+test-push-checkheads-pruned-B7.t
+test-push-checkheads-superceed-A1.t
+test-push-checkheads-superceed-A4.t
+test-push-checkheads-superceed-A5.t
+test-push-checkheads-superceed-A8.t
+test-push-checkheads-unpushed-D1.t
+test-push-checkheads-unpushed-D6.t
+test-push-checkheads-unpushed-D7.t
+test-rename-merge1.t
+test-rename.t
test-revlog-packentry.t
test-run-tests.py
+test-show-stack.t
+test-status-terse.t
+test-terse-status.t
test-unified-test.t
+test-update-issue1456.t
test-update-reverse.t
test-xdg.t
--- a/contrib/simplemerge Mon Sep 18 10:54:00 2017 -0700
+++ b/contrib/simplemerge Sat Sep 30 07:52:48 2017 -0700
@@ -1,12 +1,21 @@
#!/usr/bin/env python
-
-from mercurial import demandimport
-demandimport.enable()
+from __future__ import absolute_import
import getopt
import sys
+
+import hgdemandimport
+hgdemandimport.enable()
+
from mercurial.i18n import _
-from mercurial import error, simplemerge, fancyopts, util, ui
+from mercurial import (
+ context,
+ error,
+ fancyopts,
+ simplemerge,
+ ui as uimod,
+ util,
+)
options = [('L', 'label', [], _('labels to use on conflict markers')),
('a', 'text', None, _('treat all files as text')),
@@ -55,7 +64,12 @@
sys.exit(0)
if len(args) != 3:
raise ParseError(_('wrong number of arguments'))
- sys.exit(simplemerge.simplemerge(ui.ui.load(), *args, **opts))
+ local, base, other = args
+ sys.exit(simplemerge.simplemerge(uimod.ui.load(),
+ context.arbitraryfilectx(local),
+ context.arbitraryfilectx(base),
+ context.arbitraryfilectx(other),
+ **opts))
except ParseError as e:
sys.stdout.write("%s: %s\n" % (sys.argv[0], e))
showhelp()
--- a/contrib/synthrepo.py Mon Sep 18 10:54:00 2017 -0700
+++ b/contrib/synthrepo.py Sat Sep 30 07:52:48 2017 -0700
@@ -479,7 +479,7 @@
date = min(0x7fffffff, max(0, date))
user = random.choice(words) + '@' + random.choice(words)
mc = context.memctx(repo, pl, makeline(minimum=2),
- sorted(changes.iterkeys()),
+ sorted(changes),
filectxfn, user, '%d %d' % (date, pick(tzoffset)))
newnode = mc.commit()
heads.add(repo.changelog.rev(newnode))
--- a/contrib/undumprevlog Mon Sep 18 10:54:00 2017 -0700
+++ b/contrib/undumprevlog Sat Sep 30 07:52:48 2017 -0700
@@ -3,7 +3,7 @@
# $ hg init
# $ undumprevlog < repo.dump
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
import sys
from mercurial import (
@@ -27,7 +27,7 @@
if l.startswith("file:"):
f = l[6:-1]
r = revlog.revlog(opener, f)
- print f
+ print(f)
elif l.startswith("node:"):
n = node.bin(l[6:-1])
elif l.startswith("linkrev:"):
--- a/hg Mon Sep 18 10:54:00 2017 -0700
+++ b/hg Sat Sep 30 07:52:48 2017 -0700
@@ -6,6 +6,7 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
+from __future__ import absolute_import
import os
import sys
@@ -36,10 +37,11 @@
sys.stderr.write("(check your install and PYTHONPATH)\n")
sys.exit(-1)
-import mercurial.util
-import mercurial.dispatch
-
+from mercurial import (
+ dispatch,
+ util,
+)
for fp in (sys.stdin, sys.stdout, sys.stderr):
- mercurial.util.setbinary(fp)
+ util.setbinary(fp)
-mercurial.dispatch.run()
+dispatch.run()
--- a/hgdemandimport/__init__.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgdemandimport/__init__.py Sat Sep 30 07:52:48 2017 -0700
@@ -13,6 +13,7 @@
from __future__ import absolute_import
+import os
import sys
if sys.version_info[0] >= 3:
@@ -68,6 +69,11 @@
# Re-export.
isenabled = demandimport.isenabled
-enable = demandimport.enable
disable = demandimport.disable
deactivated = demandimport.deactivated
+
+def enable():
+ # chg pre-imports modules, so do not enable demandimport for it
+ if ('CHGINTERNALMARK' not in os.environ
+ and os.environ.get('HGDEMANDIMPORT') != 'disable'):
+ demandimport.enable()
--- a/hgdemandimport/demandimportpy2.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgdemandimport/demandimportpy2.py Sat Sep 30 07:52:48 2017 -0700
@@ -28,7 +28,6 @@
import __builtin__ as builtins
import contextlib
-import os
import sys
contextmanager = contextlib.contextmanager
@@ -285,8 +284,7 @@
def enable():
"enable global demand-loading of modules"
- if os.environ.get('HGDEMANDIMPORT') != 'disable':
- builtins.__import__ = _demandimport
+ builtins.__import__ = _demandimport
def disable():
"disable global demand-loading of modules"
--- a/hgdemandimport/demandimportpy3.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgdemandimport/demandimportpy3.py Sat Sep 30 07:52:48 2017 -0700
@@ -27,12 +27,10 @@
from __future__ import absolute_import
import contextlib
-import os
-import sys
-
import importlib.abc
import importlib.machinery
import importlib.util
+import sys
_deactivated = False
@@ -81,8 +79,7 @@
pass
def enable():
- if os.environ.get('HGDEMANDIMPORT') != 'disable':
- sys.path_hooks.insert(0, _makefinder)
+ sys.path_hooks.insert(0, _makefinder)
@contextlib.contextmanager
def deactivated():
--- a/hgext/blackbox.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/blackbox.py Sat Sep 30 07:52:48 2017 -0700
@@ -73,103 +73,64 @@
lastui = None
-filehandles = {}
+def _openlogfile(ui, vfs):
+ def rotate(oldpath, newpath):
+ try:
+ vfs.unlink(newpath)
+ except OSError as err:
+ if err.errno != errno.ENOENT:
+ ui.debug("warning: cannot remove '%s': %s\n" %
+ (newpath, err.strerror))
+ try:
+ if newpath:
+ vfs.rename(oldpath, newpath)
+ except OSError as err:
+ if err.errno != errno.ENOENT:
+ ui.debug("warning: cannot rename '%s' to '%s': %s\n" %
+ (newpath, oldpath, err.strerror))
-def _openlog(vfs):
- path = vfs.join('blackbox.log')
- if path in filehandles:
- return filehandles[path]
- filehandles[path] = fp = vfs('blackbox.log', 'a')
- return fp
-
-def _closelog(vfs):
- path = vfs.join('blackbox.log')
- fp = filehandles[path]
- del filehandles[path]
- fp.close()
+ maxsize = ui.configbytes('blackbox', 'maxsize')
+ name = 'blackbox.log'
+ if maxsize > 0:
+ try:
+ st = vfs.stat(name)
+ except OSError:
+ pass
+ else:
+ if st.st_size >= maxsize:
+ path = vfs.join(name)
+ maxfiles = ui.configint('blackbox', 'maxfiles', 7)
+ for i in xrange(maxfiles - 1, 1, -1):
+ rotate(oldpath='%s.%d' % (path, i - 1),
+ newpath='%s.%d' % (path, i))
+ rotate(oldpath=path,
+ newpath=maxfiles > 0 and path + '.1')
+ return vfs(name, 'a')
def wrapui(ui):
class blackboxui(ui.__class__):
- def __init__(self, src=None):
- super(blackboxui, self).__init__(src)
- if src is None:
- self._partialinit()
- else:
- self._bbfp = getattr(src, '_bbfp', None)
- self._bbinlog = False
- self._bbrepo = getattr(src, '_bbrepo', None)
- self._bbvfs = getattr(src, '_bbvfs', None)
-
- def _partialinit(self):
- if util.safehasattr(self, '_bbvfs'):
- return
- self._bbfp = None
- self._bbinlog = False
- self._bbrepo = None
- self._bbvfs = None
-
- def copy(self):
- self._partialinit()
- return self.__class__(self)
+ @property
+ def _bbvfs(self):
+ vfs = None
+ repo = getattr(self, '_bbrepo', None)
+ if repo:
+ vfs = repo.vfs
+ if not vfs.isdir('.'):
+ vfs = None
+ return vfs
@util.propertycache
def track(self):
return self.configlist('blackbox', 'track', ['*'])
- def _openlogfile(self):
- def rotate(oldpath, newpath):
- try:
- self._bbvfs.unlink(newpath)
- except OSError as err:
- if err.errno != errno.ENOENT:
- self.debug("warning: cannot remove '%s': %s\n" %
- (newpath, err.strerror))
- try:
- if newpath:
- self._bbvfs.rename(oldpath, newpath)
- except OSError as err:
- if err.errno != errno.ENOENT:
- self.debug("warning: cannot rename '%s' to '%s': %s\n" %
- (newpath, oldpath, err.strerror))
-
- fp = _openlog(self._bbvfs)
- maxsize = self.configbytes('blackbox', 'maxsize')
- if maxsize > 0:
- st = self._bbvfs.fstat(fp)
- if st.st_size >= maxsize:
- path = fp.name
- _closelog(self._bbvfs)
- maxfiles = self.configint('blackbox', 'maxfiles', 7)
- for i in xrange(maxfiles - 1, 1, -1):
- rotate(oldpath='%s.%d' % (path, i - 1),
- newpath='%s.%d' % (path, i))
- rotate(oldpath=path,
- newpath=maxfiles > 0 and path + '.1')
- fp = _openlog(self._bbvfs)
- return fp
-
- def _bbwrite(self, fmt, *args):
- self._bbfp.write(fmt % args)
- self._bbfp.flush()
-
def log(self, event, *msg, **opts):
global lastui
super(blackboxui, self).log(event, *msg, **opts)
- self._partialinit()
if not '*' in self.track and not event in self.track:
return
- if self._bbfp:
- ui = self
- elif self._bbvfs:
- try:
- self._bbfp = self._openlogfile()
- except (IOError, OSError) as err:
- self.debug('warning: cannot write to blackbox.log: %s\n' %
- err.strerror)
- del self._bbvfs
- self._bbfp = None
+ if self._bbvfs:
ui = self
else:
# certain ui instances exist outside the context of
@@ -177,47 +138,52 @@
# was seen.
ui = lastui
- if not ui or not ui._bbfp:
+ if not ui:
return
- if not lastui or ui._bbrepo:
+ vfs = ui._bbvfs
+ if not vfs:
+ return
+
+ repo = getattr(ui, '_bbrepo', None)
+ if not lastui or repo:
lastui = ui
- if ui._bbinlog:
- # recursion guard
+ if getattr(ui, '_bbinlog', False):
+ # recursion and failure guard
return
+ ui._bbinlog = True
+ default = self.configdate('devel', 'default-date')
+ date = util.datestr(default, '%Y/%m/%d %H:%M:%S')
+ user = util.getuser()
+ pid = '%d' % util.getpid()
+ formattedmsg = msg[0] % msg[1:]
+ rev = '(unknown)'
+ changed = ''
+ if repo:
+ ctx = repo[None]
+ parents = ctx.parents()
+ rev = ('+'.join([hex(p.node()) for p in parents]))
+ if (ui.configbool('blackbox', 'dirty') and
+ ctx.dirty(missing=True, merge=False, branch=False)):
+ changed = '+'
+ if ui.configbool('blackbox', 'logsource'):
+ src = ' [%s]' % event
+ else:
+ src = ''
try:
- ui._bbinlog = True
- default = self.configdate('devel', 'default-date')
- date = util.datestr(default, '%Y/%m/%d %H:%M:%S')
- user = util.getuser()
- pid = '%d' % util.getpid()
- formattedmsg = msg[0] % msg[1:]
- rev = '(unknown)'
- changed = ''
- if ui._bbrepo:
- ctx = ui._bbrepo[None]
- parents = ctx.parents()
- rev = ('+'.join([hex(p.node()) for p in parents]))
- if (ui.configbool('blackbox', 'dirty') and
- ctx.dirty(missing=True, merge=False, branch=False)):
- changed = '+'
- if ui.configbool('blackbox', 'logsource'):
- src = ' [%s]' % event
- else:
- src = ''
- try:
- ui._bbwrite('%s %s @%s%s (%s)%s> %s',
- date, user, rev, changed, pid, src, formattedmsg)
- except IOError as err:
- self.debug('warning: cannot write to blackbox.log: %s\n' %
- err.strerror)
- finally:
+ fmt = '%s %s @%s%s (%s)%s> %s'
+ args = (date, user, rev, changed, pid, src, formattedmsg)
+ with _openlogfile(ui, vfs) as fp:
+ fp.write(fmt % args)
+ except (IOError, OSError) as err:
+ self.debug('warning: cannot write to blackbox.log: %s\n' %
+ err.strerror)
+ # intentionally leave _bbinlog set so we do not try (and
+ # fail) to log again
+ else:
ui._bbinlog = False
def setrepo(self, repo):
- self._bbfp = None
- self._bbinlog = False
self._bbrepo = repo
- self._bbvfs = repo.vfs
ui.__class__ = blackboxui
uimod.ui = blackboxui
@@ -234,6 +200,13 @@
if util.safehasattr(ui, 'setrepo'):
ui.setrepo(repo)
+
+ # Set lastui even if ui.log is not called. This gives blackbox a
+ # fallback place to log.
+ global lastui
+ if lastui is None:
+ lastui = ui
+
repo._wlockfreeprefix.add('blackbox.log')
@command('^blackbox',
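
The rotation performed by the new _openlogfile above is plain size-based rollover. A minimal standalone sketch of the same scheme, using os directly instead of a Mercurial vfs (illustrative only):

    import os

    def openlogfile(path, maxsize=1048576, maxfiles=7):
        # Once the log reaches maxsize, shift blackbox.log.N -> .N+1 and
        # start a fresh file; rotation errors are deliberately non-fatal.
        try:
            toobig = os.stat(path).st_size >= maxsize
        except OSError:
            toobig = False
        if maxsize > 0 and toobig:
            for i in range(maxfiles - 1, 1, -1):
                old, new = '%s.%d' % (path, i - 1), '%s.%d' % (path, i)
                try:
                    os.replace(old, new)
                except OSError:
                    pass
            if maxfiles > 0:
                try:
                    os.replace(path, path + '.1')
                except OSError:
                    pass
        return open(path, 'a')
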
--- a/hgext/convert/__init__.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/convert/__init__.py Sat Sep 30 07:52:48 2017 -0700
@@ -28,6 +28,88 @@
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'
+configtable = {}
+configitem = registrar.configitem(configtable)
+
+configitem('convert', 'cvsps.cache',
+ default=True,
+)
+configitem('convert', 'cvsps.fuzz',
+ default=60,
+)
+configitem('convert', 'cvsps.mergefrom',
+ default=None,
+)
+configitem('convert', 'cvsps.mergeto',
+ default=None,
+)
+configitem('convert', 'git.committeractions',
+ default=lambda: ['messagedifferent'],
+)
+configitem('convert', 'git.extrakeys',
+ default=list,
+)
+configitem('convert', 'git.findcopiesharder',
+ default=False,
+)
+configitem('convert', 'git.remoteprefix',
+ default='remote',
+)
+configitem('convert', 'git.renamelimit',
+ default=400,
+)
+configitem('convert', 'git.saverev',
+ default=True,
+)
+configitem('convert', 'git.similarity',
+ default=50,
+)
+configitem('convert', 'git.skipsubmodules',
+ default=False,
+)
+configitem('convert', 'hg.clonebranches',
+ default=False,
+)
+configitem('convert', 'hg.ignoreerrors',
+ default=False,
+)
+configitem('convert', 'hg.revs',
+ default=None,
+)
+configitem('convert', 'hg.saverev',
+ default=False,
+)
+configitem('convert', 'hg.sourcename',
+ default=None,
+)
+configitem('convert', 'hg.startrev',
+ default=None,
+)
+configitem('convert', 'hg.tagsbranch',
+ default='default',
+)
+configitem('convert', 'hg.usebranchnames',
+ default=True,
+)
+configitem('convert', 'ignoreancestorcheck',
+ default=False,
+)
+configitem('convert', 'localtimezone',
+ default=False,
+)
+configitem('convert', 'p4.startrev',
+ default=0,
+)
+configitem('convert', 'skiptags',
+ default=False,
+)
+configitem('convert', 'svn.debugsvnlog',
+ default=True,
+)
+configitem('convert', 'svn.startrev',
+ default=0,
+)
+
# Commands definition was moved elsewhere to ease demandload job.
@command('convert',
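
The configitem() registrations above move the defaults out of the call sites, which is why the ui.config*() calls in the hunks below drop their inline default arguments. Schematically, as a toy model rather than Mercurial's actual registrar:

    # Toy model of the registrar.configitem pattern: defaults are declared
    # once, and lookups fall back to the registered default when the user
    # set nothing.
    configtable = {}

    def configitem(section, name, default=None):
        configtable[(section, name)] = default

    def config(usercfg, section, name):
        return usercfg.get((section, name), configtable[(section, name)])

    configitem('convert', 'git.similarity', default=50)
    configitem('convert', 'cvsps.fuzz', default=60)

    print(config({}, 'convert', 'git.similarity'))                        # 50
    print(config({('convert', 'cvsps.fuzz'): 90}, 'convert', 'cvsps.fuzz'))  # 90
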
--- a/hgext/convert/common.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/convert/common.py Sat Sep 30 07:52:48 2017 -0700
@@ -15,6 +15,7 @@
from mercurial.i18n import _
from mercurial import (
+ encoding,
error,
phases,
util,
@@ -475,8 +476,9 @@
try:
self.fp = open(self.path, 'a')
except IOError as err:
- raise error.Abort(_('could not open map file %r: %s') %
- (self.path, err.strerror))
+ raise error.Abort(
+ _('could not open map file %r: %s') %
+ (self.path, encoding.strtolocal(err.strerror)))
self.fp.write('%s %s\n' % (key, value))
self.fp.flush()
super(mapfile, self).__setitem__(key, value)
--- a/hgext/convert/convcmd.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/convert/convcmd.py Sat Sep 30 07:52:48 2017 -0700
@@ -59,18 +59,18 @@
def mapbranch(branch, branchmap):
'''
- >>> bmap = {'default': 'branch1'}
- >>> for i in ['', None]:
+ >>> bmap = {b'default': b'branch1'}
+ >>> for i in [b'', None]:
... mapbranch(i, bmap)
'branch1'
'branch1'
- >>> bmap = {'None': 'branch2'}
- >>> for i in ['', None]:
+ >>> bmap = {b'None': b'branch2'}
+ >>> for i in [b'', None]:
... mapbranch(i, bmap)
'branch2'
'branch2'
- >>> bmap = {'None': 'branch3', 'default': 'branch4'}
- >>> for i in ['None', '', None, 'default', 'branch5']:
+ >>> bmap = {b'None': b'branch3', b'default': b'branch4'}
+ >>> for i in [b'None', b'', None, b'default', b'branch5']:
... mapbranch(i, bmap)
'branch3'
'branch4'
@@ -87,7 +87,7 @@
# At some point we used "None" literal to denote the default branch,
# attempt to use that for backward compatibility.
if (not branch):
- branch = branchmap.get(str(None), branch)
+ branch = branchmap.get('None', branch)
return branch
source_converters = [
--- a/hgext/convert/cvs.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/convert/cvs.py Sat Sep 30 07:52:48 2017 -0700
@@ -76,13 +76,13 @@
id = None
cache = 'update'
- if not self.ui.configbool('convert', 'cvsps.cache', True):
+ if not self.ui.configbool('convert', 'cvsps.cache'):
cache = None
db = cvsps.createlog(self.ui, cache=cache)
db = cvsps.createchangeset(self.ui, db,
- fuzz=int(self.ui.config('convert', 'cvsps.fuzz', 60)),
- mergeto=self.ui.config('convert', 'cvsps.mergeto', None),
- mergefrom=self.ui.config('convert', 'cvsps.mergefrom', None))
+ fuzz=int(self.ui.config('convert', 'cvsps.fuzz')),
+ mergeto=self.ui.config('convert', 'cvsps.mergeto'),
+ mergefrom=self.ui.config('convert', 'cvsps.mergefrom'))
for cs in db:
if maxrev and cs.id > maxrev:
--- a/hgext/convert/cvsps.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/convert/cvsps.py Sat Sep 30 07:52:48 2017 -0700
@@ -54,23 +54,23 @@
def getrepopath(cvspath):
"""Return the repository path from a CVS path.
- >>> getrepopath('/foo/bar')
+ >>> getrepopath(b'/foo/bar')
'/foo/bar'
- >>> getrepopath('c:/foo/bar')
+ >>> getrepopath(b'c:/foo/bar')
'/foo/bar'
- >>> getrepopath(':pserver:10/foo/bar')
+ >>> getrepopath(b':pserver:10/foo/bar')
'/foo/bar'
- >>> getrepopath(':pserver:10c:/foo/bar')
+ >>> getrepopath(b':pserver:10c:/foo/bar')
'/foo/bar'
- >>> getrepopath(':pserver:/foo/bar')
+ >>> getrepopath(b':pserver:/foo/bar')
'/foo/bar'
- >>> getrepopath(':pserver:c:/foo/bar')
+ >>> getrepopath(b':pserver:c:/foo/bar')
'/foo/bar'
- >>> getrepopath(':pserver:truc@foo.bar:/foo/bar')
+ >>> getrepopath(b':pserver:truc@foo.bar:/foo/bar')
'/foo/bar'
- >>> getrepopath(':pserver:truc@foo.bar:c:/foo/bar')
+ >>> getrepopath(b':pserver:truc@foo.bar:c:/foo/bar')
'/foo/bar'
- >>> getrepopath('user@server/path/to/repository')
+ >>> getrepopath(b'user@server/path/to/repository')
'/path/to/repository'
"""
# According to CVS manual, CVS paths are expressed like:
--- a/hgext/convert/filemap.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/convert/filemap.py Sat Sep 30 07:52:48 2017 -0700
@@ -3,7 +3,8 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
+
+from __future__ import absolute_import, print_function
import posixpath
import shlex
@@ -18,7 +19,7 @@
def rpairs(path):
'''Yield tuples with path split at '/', starting with the full path.
No leading, trailing or double '/', please.
- >>> for x in rpairs('foo/bar/baz'): print x
+ >>> for x in rpairs(b'foo/bar/baz'): print(x)
('foo/bar/baz', '')
('foo/bar', 'baz')
('foo', 'bar/baz')
--- a/hgext/convert/git.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/convert/git.py Sat Sep 30 07:52:48 2017 -0700
@@ -81,18 +81,16 @@
path)
# The default value (50) is based on the default for 'git diff'.
- similarity = ui.configint('convert', 'git.similarity', default=50)
+ similarity = ui.configint('convert', 'git.similarity')
if similarity < 0 or similarity > 100:
raise error.Abort(_('similarity must be between 0 and 100'))
if similarity > 0:
self.simopt = ['-C%d%%' % similarity]
- findcopiesharder = ui.configbool('convert', 'git.findcopiesharder',
- False)
+ findcopiesharder = ui.configbool('convert', 'git.findcopiesharder')
if findcopiesharder:
self.simopt.append('--find-copies-harder')
- renamelimit = ui.configint('convert', 'git.renamelimit',
- default=400)
+ renamelimit = ui.configint('convert', 'git.renamelimit')
self.simopt.append('-l%d' % renamelimit)
else:
self.simopt = []
@@ -110,8 +108,7 @@
raise error.Abort(_('copying of extra key is forbidden: %s') %
_(', ').join(sorted(banned)))
- committeractions = self.ui.configlist('convert', 'git.committeractions',
- 'messagedifferent')
+ committeractions = self.ui.configlist('convert', 'git.committeractions')
messagedifferent = None
messagealways = None
@@ -264,8 +261,7 @@
lcount = len(difftree)
i = 0
- skipsubmodules = self.ui.configbool('convert', 'git.skipsubmodules',
- False)
+ skipsubmodules = self.ui.configbool('convert', 'git.skipsubmodules')
def add(entry, f, isdest):
seen.add(f)
h = entry[3]
@@ -375,7 +371,7 @@
tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
tz = -int(tzs) * (int(tzh) * 3600 + int(tzm))
date = tm + " " + str(tz)
- saverev = self.ui.configbool('convert', 'git.saverev', True)
+ saverev = self.ui.configbool('convert', 'git.saverev')
c = common.commit(parents=parents, date=date, author=author,
desc=message,
@@ -448,7 +444,7 @@
bookmarks = {}
# Handle local and remote branches
- remoteprefix = self.ui.config('convert', 'git.remoteprefix', 'remote')
+ remoteprefix = self.ui.config('convert', 'git.remoteprefix')
reftypes = [
# (git prefix, hg prefix)
('refs/remotes/origin/', remoteprefix + '/'),
--- a/hgext/convert/hg.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/convert/hg.py Sat Sep 30 07:52:48 2017 -0700
@@ -47,9 +47,9 @@
class mercurial_sink(common.converter_sink):
def __init__(self, ui, path):
common.converter_sink.__init__(self, ui, path)
- self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
- self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
- self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
+ self.branchnames = ui.configbool('convert', 'hg.usebranchnames')
+ self.clonebranches = ui.configbool('convert', 'hg.clonebranches')
+ self.tagsbranch = ui.config('convert', 'hg.tagsbranch')
self.lastbranch = None
if os.path.isdir(path) and len(os.listdir(path)) > 0:
try:
@@ -446,9 +446,9 @@
class mercurial_source(common.converter_source):
def __init__(self, ui, path, revs=None):
common.converter_source.__init__(self, ui, path, revs)
- self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors', False)
+ self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors')
self.ignored = set()
- self.saverev = ui.configbool('convert', 'hg.saverev', False)
+ self.saverev = ui.configbool('convert', 'hg.saverev')
try:
self.repo = hg.repository(self.ui, path)
# try to provoke an exception if this isn't really a hg
--- a/hgext/convert/p4.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/convert/p4.py Sat Sep 30 07:52:48 2017 -0700
@@ -32,9 +32,9 @@
"""Perforce escapes special characters @, #, *, or %
with %40, %23, %2A, or %25 respectively
- >>> decodefilename('portable-net45%252Bnetcore45%252Bwp8%252BMonoAndroid')
+ >>> decodefilename(b'portable-net45%252Bnetcore45%252Bwp8%252BMonoAndroid')
'portable-net45%2Bnetcore45%2Bwp8%2BMonoAndroid'
- >>> decodefilename('//Depot/Directory/%2525/%2523/%23%40.%2A')
+ >>> decodefilename(b'//Depot/Directory/%2525/%2523/%23%40.%2A')
'//Depot/Directory/%25/%23/#@.*'
"""
replacements = [('%2A', '*'), ('%23', '#'), ('%40', '@'), ('%25', '%')]
@@ -138,7 +138,7 @@
vieworder.sort(key=len, reverse=True)
# handle revision limiting
- startrev = self.ui.config('convert', 'p4.startrev', default=0)
+ startrev = self.ui.config('convert', 'p4.startrev')
# now read the full changelists to get the list of file revisions
ui.status(_('collecting p4 changelists\n'))
--- a/hgext/convert/subversion.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/convert/subversion.py Sat Sep 30 07:52:48 2017 -0700
@@ -61,16 +61,16 @@
def revsplit(rev):
"""Parse a revision string and return (uuid, path, revnum).
- >>> revsplit('svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
- ... '/proj%20B/mytrunk/mytrunk@1')
+ >>> revsplit(b'svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
+ ... b'/proj%20B/mytrunk/mytrunk@1')
('a2147622-4a9f-4db4-a8d3-13562ff547b2', '/proj%20B/mytrunk/mytrunk', 1)
- >>> revsplit('svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
+ >>> revsplit(b'svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
('', '', 1)
- >>> revsplit('@7')
+ >>> revsplit(b'@7')
('', '', 7)
- >>> revsplit('7')
+ >>> revsplit(b'7')
('', '', 0)
- >>> revsplit('bad')
+ >>> revsplit(b'bad')
('', '', 0)
"""
parts = rev.rsplit('@', 1)
@@ -354,7 +354,7 @@
self.trunkname = self.ui.config('convert', 'svn.trunk',
'trunk').strip('/')
- self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
+ self.startrev = self.ui.config('convert', 'svn.startrev')
try:
self.startrev = int(self.startrev)
if self.startrev < 0:
@@ -1059,7 +1059,7 @@
args = [self.baseurl, relpaths, start, end, limit,
discover_changed_paths, strict_node_history]
# developer config: convert.svn.debugsvnlog
- if not self.ui.configbool('convert', 'svn.debugsvnlog', True):
+ if not self.ui.configbool('convert', 'svn.debugsvnlog'):
return directlogstream(*args)
arg = encodeargs(args)
hgexe = util.hgexecutable()
--- a/hgext/eol.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/eol.py Sat Sep 30 07:52:48 2017 -0700
@@ -102,6 +102,7 @@
extensions,
match,
pycompat,
+ registrar,
util,
)
@@ -111,6 +112,19 @@
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'
+configtable = {}
+configitem = registrar.configitem(configtable)
+
+configitem('eol', 'fix-trailing-newline',
+ default=False,
+)
+configitem('eol', 'native',
+ default=pycompat.oslinesep,
+)
+configitem('eol', 'only-consistent',
+ default=True,
+)
+
# Matches a lone LF, i.e., one that is not part of CRLF.
singlelf = re.compile('(^|[^\r])\n')
@@ -121,9 +135,9 @@
"""Filter to convert to LF EOLs."""
if util.binary(s):
return s
- if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s):
+ if ui.configbool('eol', 'only-consistent') and inconsistenteol(s):
return s
- if (ui.configbool('eol', 'fix-trailing-newline', False)
+ if (ui.configbool('eol', 'fix-trailing-newline')
and s and s[-1] != '\n'):
s = s + '\n'
return util.tolf(s)
@@ -132,9 +146,9 @@
"""Filter to convert to CRLF EOLs."""
if util.binary(s):
return s
- if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s):
+ if ui.configbool('eol', 'only-consistent') and inconsistenteol(s):
return s
- if (ui.configbool('eol', 'fix-trailing-newline', False)
+ if (ui.configbool('eol', 'fix-trailing-newline')
and s and s[-1] != '\n'):
s = s + '\n'
return util.tocrlf(s)
@@ -166,7 +180,7 @@
isrepolf = self.cfg.get('repository', 'native') != 'CRLF'
self._encode['NATIVE'] = isrepolf and 'to-lf' or 'to-crlf'
- iswdlf = ui.config('eol', 'native', pycompat.oslinesep) in ('LF', '\n')
+ iswdlf = ui.config('eol', 'native') in ('LF', '\n')
self._decode['NATIVE'] = iswdlf and 'to-lf' or 'to-crlf'
include = []
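
For reference, the behavior the eol hunk preserves while moving its defaults into configitem() can be reproduced standalone. A small sketch of the to-lf decision points, mirroring the singlelf/inconsistenteol idea shown above but reimplemented with plain re rather than calling into the extension (the binary-file check is omitted):

    import re

    # A lone LF, i.e. one not preceded by CR (same idea as the extension's
    # singlelf pattern in the context above).
    singlelf = re.compile('(^|[^\r])\n')

    def inconsistent(data):
        # Mixed endings: at least one bare LF plus at least one CRLF.
        return bool(singlelf.search(data)) and '\r\n' in data

    def tolf(data, only_consistent=True, fix_trailing_newline=False):
        # Mirrors the decision points of the to-lf filter above; the
        # defaults match the configitem registrations in this hunk, and
        # the util.binary() check is left out for brevity.
        if only_consistent and inconsistent(data):
            return data                      # leave mixed files alone
        if fix_trailing_newline and data and not data.endswith('\n'):
            data += '\n'
        return data.replace('\r\n', '\n')

    assert tolf('a\r\nb\n') == 'a\r\nb\n'    # inconsistent: untouched
    assert tolf('a\r\nb\r\n') == 'a\nb\n'    # consistent CRLF: converted
    assert tolf('a', fix_trailing_newline=True) == 'a\n'
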
--- a/hgext/fsmonitor/pywatchman/__init__.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/fsmonitor/pywatchman/__init__.py Sat Sep 30 07:52:48 2017 -0700
@@ -825,7 +825,7 @@
p = subprocess.Popen(cmd, **args)
except OSError as e:
- raise WatchmanError('"watchman" executable not in PATH (%s)', e)
+ raise WatchmanError('"watchman" executable not in PATH (%s)' % e)
stdout, stderr = p.communicate()
exitcode = p.poll()
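
The pywatchman hunk fixes a classic formatting slip: passing the OSError as a second constructor argument stores it next to the format string instead of interpolating it, so the '%s' placeholder used to survive into the error text. A quick standalone illustration:

    # Stand-in class, just to make the point without importing pywatchman.
    class WatchmanError(Exception):
        pass

    err = OSError('No such file or directory')

    # Old form: the exception stores two args and str() shows the raw args
    # tuple, with the '%s' placeholder never filled in.
    print(str(WatchmanError('"watchman" executable not in PATH (%s)', err)))

    # New form: interpolate first, then construct the exception.
    print(str(WatchmanError('"watchman" executable not in PATH (%s)' % err)))
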
--- a/hgext/hgk.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/hgk.py Sat Sep 30 07:52:48 2017 -0700
@@ -50,6 +50,7 @@
patch,
registrar,
scmutil,
+ util,
)
cmdtable = {}
@@ -96,7 +97,7 @@
while True:
if opts['stdin']:
try:
- line = raw_input().split(' ')
+ line = util.bytesinput(ui.fin, ui.fout).split(' ')
node1 = line[0]
if len(line) > 1:
node2 = line[1]
@@ -177,7 +178,7 @@
prefix = ""
if opts['stdin']:
try:
- (type, r) = raw_input().split(' ')
+ (type, r) = util.bytesinput(ui.fin, ui.fout).split(' ')
prefix = " "
except EOFError:
return
@@ -195,7 +196,7 @@
catcommit(ui, repo, n, prefix)
if opts['stdin']:
try:
- (type, r) = raw_input().split(' ')
+ (type, r) = util.bytesinput(ui.fin, ui.fout).split(' ')
except EOFError:
break
else:
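
raw_input() reads a native str from the process's real stdin, which is what hgk --stdin used to depend on; the replacement reads a line of bytes from the ui's own streams. The call shape util.bytesinput(ui.fin, ui.fout) is taken from the hunk; the helper below is only a rough, illustrative stand-in for that behavior, not the real mercurial.util implementation:

    import io

    def bytesinput_sketch(fin, fout, prompt=b''):
        # Rough stand-in only, not mercurial.util.bytesinput: write the
        # prompt (if any) to the ui's output stream, read one line of bytes
        # from its input stream, and raise EOFError at end of input just as
        # raw_input() would.
        if prompt:
            fout.write(prompt)
            fout.flush()
        line = fin.readline()
        if not line:
            raise EOFError
        return line.rstrip(b'\r\n')

    fin = io.BytesIO(b'deadbeefcafe 0123456789ab\n')
    print(bytesinput_sketch(fin, io.BytesIO()).split(b' '))
    # [b'deadbeefcafe', b'0123456789ab']
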
--- a/hgext/histedit.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/histedit.py Sat Sep 30 07:52:48 2017 -0700
@@ -1370,7 +1370,7 @@
rules += '\n\n'
rules += editcomment
rules = ui.edit(rules, ui.username(), {'prefix': 'histedit'},
- repopath=repo.path)
+ repopath=repo.path, action='histedit')
# Save edit rules in .hg/histedit-last-edit.txt in case
# the user needs to ask for help after something
@@ -1417,6 +1417,11 @@
expected = set(c.node() for c in ctxs)
seen = set()
prev = None
+
+ if actions and actions[0].verb in ['roll', 'fold']:
+ raise error.ParseError(_('first changeset cannot use verb "%s"') %
+ actions[0].verb)
+
for action in actions:
action.verify(prev, expected, seen)
prev = action
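
The new histedit guard rejects rule lists whose first action is 'roll' or 'fold', since there is no earlier changeset for them to fold into. A minimal standalone illustration of the check (the error class and action objects are simplified stand-ins for the ones in the hunk):

    # Stand-alone illustration; error.ParseError and the real action objects
    # from histedit are replaced by simple stand-ins with the same shape.
    class ParseError(Exception):
        pass

    class action(object):
        def __init__(self, verb):
            self.verb = verb

    def checkfirst(actions):
        if actions and actions[0].verb in ['roll', 'fold']:
            raise ParseError('first changeset cannot use verb "%s"'
                             % actions[0].verb)

    checkfirst([action('pick'), action('fold')])     # later folds are fine
    try:
        checkfirst([action('roll'), action('pick')])
    except ParseError as inst:
        print(inst)    # first changeset cannot use verb "roll"
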
--- a/hgext/journal.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/journal.py Sat Sep 30 07:52:48 2017 -0700
@@ -342,7 +342,7 @@
with self.jlock(vfs):
version = None
# open file in amend mode to ensure it is created if missing
- with vfs('namejournal', mode='a+b', atomictemp=True) as f:
+ with vfs('namejournal', mode='a+b') as f:
f.seek(0, os.SEEK_SET)
# Read just enough bytes to get a version number (up to 2
# digits plus separator)
--- a/hgext/keyword.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/keyword.py Sat Sep 30 07:52:48 2017 -0700
@@ -614,14 +614,14 @@
if kwt:
kwt.match = origmatch
-def kw_amend(orig, ui, repo, commitfunc, old, extra, pats, opts):
+def kw_amend(orig, ui, repo, old, extra, pats, opts):
'''Wraps cmdutil.amend expanding keywords after amend.'''
kwt = getattr(repo, '_keywordkwt', None)
if kwt is None:
- return orig(ui, repo, commitfunc, old, extra, pats, opts)
+ return orig(ui, repo, old, extra, pats, opts)
with repo.wlock():
kwt.postcommit = True
- newid = orig(ui, repo, commitfunc, old, extra, pats, opts)
+ newid = orig(ui, repo, old, extra, pats, opts)
if newid != old.node():
ctx = repo[newid]
kwt.restrict = True
--- a/hgext/largefiles/__init__.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/largefiles/__init__.py Sat Sep 30 07:52:48 2017 -0700
@@ -91,7 +91,7 @@
[largefiles]
patterns =
*.jpg
- re:.*\.(png|bmp)$
+ re:.*\\.(png|bmp)$
library.zip
content/audio/*
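
The one-character largefiles change avoids an unrecognized escape sequence in a plain (non-raw) docstring: '\.' only works because Python passes unknown escapes through literally, and recent Python 3 releases warn about that, so '\\.' is the future-proof spelling. The two spellings produce the same pattern:

    import re

    # '\.' is not a recognized escape, so Python keeps the backslash, but
    # recent Python 3 releases warn about such sequences; '\\.' spells the
    # same two characters explicitly.
    pattern_old = '.*\.(png|bmp)$'
    pattern_new = '.*\\.(png|bmp)$'
    assert pattern_old == pattern_new

    assert re.match(pattern_new, 'logo.png')
    assert not re.match(pattern_new, 'logoxpng')
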
--- a/hgext/largefiles/lfcommands.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/largefiles/lfcommands.py Sat Sep 30 07:52:48 2017 -0700
@@ -422,14 +422,13 @@
return ([], [])
def downloadlfiles(ui, repo, rev=None):
- matchfn = scmutil.match(repo[None],
- [repo.wjoin(lfutil.shortname)], {})
+ match = scmutil.match(repo[None], [repo.wjoin(lfutil.shortname)], {})
def prepare(ctx, fns):
pass
totalsuccess = 0
totalmissing = 0
if rev != []: # walkchangerevs on empty list would return all revs
- for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
+ for ctx in cmdutil.walkchangerevs(repo, match, {'rev' : rev},
prepare):
success, missing = cachelfiles(ui, repo, ctx.node())
totalsuccess += len(success)
--- a/hgext/largefiles/lfutil.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/largefiles/lfutil.py Sat Sep 30 07:52:48 2017 -0700
@@ -155,7 +155,8 @@
# largefiles operation in a new clone.
if create and not vfs.exists(vfs.join(lfstoredir, 'dirstate')):
matcher = getstandinmatcher(repo)
- standins = repo.dirstate.walk(matcher, [], False, False)
+ standins = repo.dirstate.walk(matcher, subrepos=[], unknown=False,
+ ignored=False)
if len(standins) > 0:
vfs.makedirs(lfstoredir)
@@ -168,7 +169,8 @@
def lfdirstatestatus(lfdirstate, repo):
pctx = repo['.']
match = matchmod.always(repo.root, repo.getcwd())
- unsure, s = lfdirstate.status(match, [], False, False, False)
+ unsure, s = lfdirstate.status(match, subrepos=[], ignored=False,
+ clean=False, unknown=False)
modified, clean = s.modified, s.clean
for lfile in unsure:
try:
@@ -428,7 +430,8 @@
standins = []
matcher = getstandinmatcher(repo)
wctx = repo[None]
- for standin in repo.dirstate.walk(matcher, [], False, False):
+ for standin in repo.dirstate.walk(matcher, subrepos=[], unknown=False,
+ ignored=False):
lfile = splitstandin(standin)
try:
hash = readasstandin(wctx[standin])
@@ -549,8 +552,8 @@
# large.
lfdirstate = openlfdirstate(ui, repo)
dirtymatch = matchmod.always(repo.root, repo.getcwd())
- unsure, s = lfdirstate.status(dirtymatch, [], False, False,
- False)
+ unsure, s = lfdirstate.status(dirtymatch, subrepos=[], ignored=False,
+ clean=False, unknown=False)
modifiedfiles = unsure + s.modified + s.added + s.removed
lfiles = listlfiles(repo)
# this only loops through largefiles that exist (not
@@ -573,7 +576,8 @@
# Case 2: user calls commit with specified patterns: refresh
# any matching big files.
smatcher = composestandinmatcher(repo, match)
- standins = repo.dirstate.walk(smatcher, [], False, False)
+ standins = repo.dirstate.walk(smatcher, subrepos=[], unknown=False,
+ ignored=False)
# No matching big files: get out of the way and pass control to
# the usual commit() method.
--- a/hgext/largefiles/overrides.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/largefiles/overrides.py Sat Sep 30 07:52:48 2017 -0700
@@ -545,10 +545,10 @@
# Override filemerge to prompt the user about how they wish to merge
# largefiles. This will handle identical edits without prompting the user.
-def overridefilemerge(origfn, premerge, repo, mynode, orig, fcd, fco, fca,
+def overridefilemerge(origfn, premerge, repo, wctx, mynode, orig, fcd, fco, fca,
labels=None):
if not lfutil.isstandin(orig) or fcd.isabsent() or fco.isabsent():
- return origfn(premerge, repo, mynode, orig, fcd, fco, fca,
+ return origfn(premerge, repo, wctx, mynode, orig, fcd, fco, fca,
labels=labels)
ahash = lfutil.readasstandin(fca).lower()
@@ -1218,8 +1218,9 @@
return orig(repo, matcher, prefix, opts, dry_run, similarity)
# Get the list of missing largefiles so we can remove them
lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
- unsure, s = lfdirstate.status(matchmod.always(repo.root, repo.getcwd()), [],
- False, False, False)
+ unsure, s = lfdirstate.status(matchmod.always(repo.root, repo.getcwd()),
+ subrepos=[], ignored=False, clean=False,
+ unknown=False)
# Call into the normal remove code, but the removing of the standin, we want
# to have handled by original addremove. Monkey patching here makes sure
@@ -1403,7 +1404,8 @@
lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
unsure, s = lfdirstate.status(matchmod.always(repo.root,
repo.getcwd()),
- [], False, True, False)
+ subrepos=[], ignored=False,
+ clean=True, unknown=False)
oldclean = set(s.clean)
pctx = repo['.']
dctx = repo[node]
@@ -1432,7 +1434,10 @@
lfdirstate.write()
oldstandins = lfutil.getstandinsstate(repo)
-
+ # Make sure the merge runs on disk, not in-memory. largefiles is not a
+ # good candidate for in-memory merge (large files, custom dirstate,
+ # matcher usage).
+ kwargs['wc'] = repo[None]
result = orig(repo, node, branchmerge, force, *args, **kwargs)
newstandins = lfutil.getstandinsstate(repo)
--- a/hgext/largefiles/remotestore.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/largefiles/remotestore.py Sat Sep 30 07:52:48 2017 -0700
@@ -12,7 +12,6 @@
from mercurial import (
error,
util,
- wireproto,
)
from . import (
@@ -109,10 +108,6 @@
'from statlfile (%r)' % stat)
return failed
- def batch(self):
- '''Support for remote batching.'''
- return wireproto.remotebatch(self)
-
def _put(self, hash, fd):
'''Put file with the given hash in the remote store.'''
raise NotImplementedError('abstract method')
--- a/hgext/largefiles/reposetup.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/largefiles/reposetup.py Sat Sep 30 07:52:48 2017 -0700
@@ -162,8 +162,10 @@
if sfindirstate(f)]
# Don't waste time getting the ignored and unknown
# files from lfdirstate
- unsure, s = lfdirstate.status(match, [], False, listclean,
- False)
+ unsure, s = lfdirstate.status(match, subrepos=[],
+ ignored=False,
+ clean=listclean,
+ unknown=False)
(modified, added, removed, deleted, clean) = (
s.modified, s.added, s.removed, s.deleted, s.clean)
if parentworking:
--- a/hgext/largefiles/uisetup.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/largefiles/uisetup.py Sat Sep 30 07:52:48 2017 -0700
@@ -53,8 +53,7 @@
# The scmutil function is called both by the (trivial) addremove command,
# and in the process of handling commit -A (issue3542)
- entry = extensions.wrapfunction(scmutil, 'addremove',
- overrides.scmutiladdremove)
+ extensions.wrapfunction(scmutil, 'addremove', overrides.scmutiladdremove)
extensions.wrapfunction(cmdutil, 'add', overrides.cmdutiladd)
extensions.wrapfunction(cmdutil, 'remove', overrides.cmdutilremove)
extensions.wrapfunction(cmdutil, 'forget', overrides.cmdutilforget)
@@ -64,8 +63,8 @@
# Subrepos call status function
entry = extensions.wrapcommand(commands.table, 'status',
overrides.overridestatus)
- entry = extensions.wrapfunction(subrepo.hgsubrepo, 'status',
- overrides.overridestatusfn)
+ extensions.wrapfunction(subrepo.hgsubrepo, 'status',
+ overrides.overridestatusfn)
entry = extensions.wrapcommand(commands.table, 'log',
overrides.overridelog)
@@ -111,46 +110,41 @@
pushopt = [('', 'lfrev', [],
_('upload largefiles for these revisions'), _('REV'))]
entry[1].extend(pushopt)
- entry = extensions.wrapfunction(exchange, 'pushoperation',
- overrides.exchangepushoperation)
+ extensions.wrapfunction(exchange, 'pushoperation',
+ overrides.exchangepushoperation)
entry = extensions.wrapcommand(commands.table, 'clone',
overrides.overrideclone)
cloneopt = [('', 'all-largefiles', None,
_('download all versions of all largefiles'))]
entry[1].extend(cloneopt)
- entry = extensions.wrapfunction(hg, 'clone', overrides.hgclone)
- entry = extensions.wrapfunction(hg, 'postshare', overrides.hgpostshare)
+ extensions.wrapfunction(hg, 'clone', overrides.hgclone)
+ extensions.wrapfunction(hg, 'postshare', overrides.hgpostshare)
entry = extensions.wrapcommand(commands.table, 'cat',
overrides.overridecat)
- entry = extensions.wrapfunction(merge, '_checkunknownfile',
- overrides.overridecheckunknownfile)
- entry = extensions.wrapfunction(merge, 'calculateupdates',
- overrides.overridecalculateupdates)
- entry = extensions.wrapfunction(merge, 'recordupdates',
- overrides.mergerecordupdates)
- entry = extensions.wrapfunction(merge, 'update',
- overrides.mergeupdate)
- entry = extensions.wrapfunction(filemerge, '_filemerge',
- overrides.overridefilemerge)
- entry = extensions.wrapfunction(cmdutil, 'copy',
- overrides.overridecopy)
+ extensions.wrapfunction(merge, '_checkunknownfile',
+ overrides.overridecheckunknownfile)
+ extensions.wrapfunction(merge, 'calculateupdates',
+ overrides.overridecalculateupdates)
+ extensions.wrapfunction(merge, 'recordupdates',
+ overrides.mergerecordupdates)
+ extensions.wrapfunction(merge, 'update', overrides.mergeupdate)
+ extensions.wrapfunction(filemerge, '_filemerge',
+ overrides.overridefilemerge)
+ extensions.wrapfunction(cmdutil, 'copy', overrides.overridecopy)
# Summary calls dirty on the subrepos
- entry = extensions.wrapfunction(subrepo.hgsubrepo, 'dirty',
- overrides.overridedirty)
+ extensions.wrapfunction(subrepo.hgsubrepo, 'dirty', overrides.overridedirty)
- entry = extensions.wrapfunction(cmdutil, 'revert',
- overrides.overriderevert)
+ extensions.wrapfunction(cmdutil, 'revert', overrides.overriderevert)
extensions.wrapcommand(commands.table, 'archive',
overrides.overridearchivecmd)
extensions.wrapfunction(archival, 'archive', overrides.overridearchive)
extensions.wrapfunction(subrepo.hgsubrepo, 'archive',
overrides.hgsubrepoarchive)
- extensions.wrapfunction(webcommands, 'archive',
- overrides.hgwebarchive)
+ extensions.wrapfunction(webcommands, 'archive', overrides.hgwebarchive)
extensions.wrapfunction(cmdutil, 'bailifchanged',
overrides.overridebailifchanged)
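
The uisetup cleanup drops the unused 'entry =' bindings in front of extensions.wrapfunction calls. The binding only matters for wrapcommand, whose returned command-table entry is mutated to add new flags; wrapfunction installs the wrapper and its return value is not needed. A toy model of the distinction (not mercurial.extensions itself):

    # Toy model of the two helpers, to show why only wrapcommand's return
    # value is worth keeping; this is not mercurial.extensions itself.
    def wrapfunction(container, funcname, wrapper):
        origfn = getattr(container, funcname)
        setattr(container, funcname,
                lambda *a, **kw: wrapper(origfn, *a, **kw))
        # nothing useful to return

    def wrapcommand(table, command, wrapper):
        origfn, opts = table[command]
        table[command] = (lambda *a, **kw: wrapper(origfn, *a, **kw), opts)
        return table[command]       # the entry; callers extend entry[1]

    class namespace(object):
        pass

    scmutil = namespace()
    scmutil.addremove = lambda path: 'addremove %s' % path
    table = {'status': (lambda path: 'status %s' % path, [])}

    wrapfunction(scmutil, 'addremove', lambda orig, path: orig(path).upper())
    entry = wrapcommand(table, 'status',
                        lambda orig, path: orig(path) + ' (largefiles aware)')
    entry[1].append(('', 'large', None, 'display largefiles'))

    print(scmutil.addremove('f'))     # ADDREMOVE F
    print(table['status'][0]('f'))    # status f (largefiles aware)
    print(table['status'][1])         # the added --large flag is visible
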
--- a/hgext/mq.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/mq.py Sat Sep 30 07:52:48 2017 -0700
@@ -62,7 +62,7 @@
in the strip extension.
'''
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
import errno
import os
@@ -80,6 +80,7 @@
cmdutil,
commands,
dirstateguard,
+ encoding,
error,
extensions,
hg,
@@ -108,6 +109,22 @@
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'
+configtable = {}
+configitem = registrar.configitem(configtable)
+
+configitem('mq', 'git',
+ default='auto',
+)
+configitem('mq', 'keepchanges',
+ default=False,
+)
+configitem('mq', 'plain',
+ default=False,
+)
+configitem('mq', 'secret',
+ default=False,
+)
+
# force load strip extension formerly included in mq and import some utility
try:
stripext = extensions.find('strip')
@@ -153,23 +170,25 @@
def inserthgheader(lines, header, value):
"""Assuming lines contains a HG patch header, add a header line with value.
- >>> try: inserthgheader([], '# Date ', 'z')
- ... except ValueError, inst: print "oops"
+ >>> try: inserthgheader([], b'# Date ', b'z')
+ ... except ValueError as inst: print("oops")
oops
- >>> inserthgheader(['# HG changeset patch'], '# Date ', 'z')
+ >>> inserthgheader([b'# HG changeset patch'], b'# Date ', b'z')
['# HG changeset patch', '# Date z']
- >>> inserthgheader(['# HG changeset patch', ''], '# Date ', 'z')
+ >>> inserthgheader([b'# HG changeset patch', b''], b'# Date ', b'z')
['# HG changeset patch', '# Date z', '']
- >>> inserthgheader(['# HG changeset patch', '# User y'], '# Date ', 'z')
+ >>> inserthgheader([b'# HG changeset patch', b'# User y'], b'# Date ', b'z')
['# HG changeset patch', '# User y', '# Date z']
- >>> inserthgheader(['# HG changeset patch', '# Date x', '# User y'],
- ... '# User ', 'z')
+ >>> inserthgheader([b'# HG changeset patch', b'# Date x', b'# User y'],
+ ... b'# User ', b'z')
['# HG changeset patch', '# Date x', '# User z']
- >>> inserthgheader(['# HG changeset patch', '# Date y'], '# Date ', 'z')
+ >>> inserthgheader([b'# HG changeset patch', b'# Date y'], b'# Date ', b'z')
['# HG changeset patch', '# Date z']
- >>> inserthgheader(['# HG changeset patch', '', '# Date y'], '# Date ', 'z')
+ >>> inserthgheader([b'# HG changeset patch', b'', b'# Date y'],
+ ... b'# Date ', b'z')
['# HG changeset patch', '# Date z', '', '# Date y']
- >>> inserthgheader(['# HG changeset patch', '# Parent y'], '# Date ', 'z')
+ >>> inserthgheader([b'# HG changeset patch', b'# Parent y'],
+ ... b'# Date ', b'z')
['# HG changeset patch', '# Date z', '# Parent y']
"""
start = lines.index('# HG changeset patch') + 1
@@ -193,19 +212,19 @@
def insertplainheader(lines, header, value):
"""For lines containing a plain patch header, add a header line with value.
- >>> insertplainheader([], 'Date', 'z')
+ >>> insertplainheader([], b'Date', b'z')
['Date: z']
- >>> insertplainheader([''], 'Date', 'z')
+ >>> insertplainheader([b''], b'Date', b'z')
['Date: z', '']
- >>> insertplainheader(['x'], 'Date', 'z')
+ >>> insertplainheader([b'x'], b'Date', b'z')
['Date: z', '', 'x']
- >>> insertplainheader(['From: y', 'x'], 'Date', 'z')
+ >>> insertplainheader([b'From: y', b'x'], b'Date', b'z')
['From: y', 'Date: z', '', 'x']
- >>> insertplainheader([' date : x', ' from : y', ''], 'From', 'z')
+ >>> insertplainheader([b' date : x', b' from : y', b''], b'From', b'z')
[' date : x', 'From: z', '']
- >>> insertplainheader(['', 'Date: y'], 'Date', 'z')
+ >>> insertplainheader([b'', b'Date: y'], b'Date', b'z')
['Date: z', '', 'Date: y']
- >>> insertplainheader(['foo: bar', 'DATE: z', 'x'], 'From', 'y')
+ >>> insertplainheader([b'foo: bar', b'DATE: z', b'x'], b'From', b'y')
['From: y', 'foo: bar', 'DATE: z', '', 'x']
"""
newprio = PLAINHEADERS[header.lower()]
@@ -403,7 +422,7 @@
"""
repo = repo.unfiltered()
if phase is None:
- if repo.ui.configbool('mq', 'secret', False):
+ if repo.ui.configbool('mq', 'secret'):
phase = phases.secret
overrides = {('ui', 'allowemptycommit'): True}
if phase is not None:
@@ -441,19 +460,16 @@
self.activeguards = None
self.guardsdirty = False
# Handle mq.git as a bool with extended values
- try:
- gitmode = ui.configbool('mq', 'git', None)
- if gitmode is None:
- raise error.ConfigError
- if gitmode:
- self.gitmode = 'yes'
+ gitmode = ui.config('mq', 'git').lower()
+ boolmode = util.parsebool(gitmode)
+ if boolmode is not None:
+ if boolmode:
+ gitmode = 'yes'
else:
- self.gitmode = 'no'
- except error.ConfigError:
- # let's have check-config ignore the type mismatch
- self.gitmode = ui.config(r'mq', 'git', 'auto').lower()
+ gitmode = 'no'
+ self.gitmode = gitmode
# deprecated config: mq.plain
- self.plainmode = ui.configbool('mq', 'plain', False)
+ self.plainmode = ui.configbool('mq', 'plain')
self.checkapplied = True
@util.propertycache
@@ -1046,7 +1062,7 @@
repo._phasecache
patches = self._revpatches(repo, sorted(revs))
qfinished = self._cleanup(patches, len(patches))
- if qfinished and repo.ui.configbool('mq', 'secret', False):
+ if qfinished and repo.ui.configbool('mq', 'secret'):
# only use this logic when the secret option is added
oldqbase = repo[qfinished[0]]
tphase = repo.ui.config('phases', 'new-commit', phases.draft)
@@ -1209,7 +1225,7 @@
p = self.opener(patchfn, "w")
except IOError as e:
raise error.Abort(_('cannot write patch "%s": %s')
- % (patchfn, e.strerror))
+ % (patchfn, encoding.strtolocal(e.strerror)))
try:
defaultmsg = "[mq]: %s" % patchfn
editor = cmdutil.getcommiteditor(editform=editform)
@@ -1667,15 +1683,15 @@
changes = repo.changelog.read(top)
man = repo.manifestlog[changes[0]].read()
aaa = aa[:]
- matchfn = scmutil.match(repo[None], pats, opts)
+ match1 = scmutil.match(repo[None], pats, opts)
# in short mode, we only diff the files included in the
# patch already plus specified files
if opts.get('short'):
# if amending a patch, we start with existing
# files plus specified files - unfiltered
- match = scmutil.matchfiles(repo, mm + aa + dd + matchfn.files())
+ match = scmutil.matchfiles(repo, mm + aa + dd + match1.files())
# filter with include/exclude options
- matchfn = scmutil.match(repo[None], opts=opts)
+ match1 = scmutil.match(repo[None], opts=opts)
else:
match = scmutil.matchall(repo)
m, a, r, d = repo.status(match=match)[:4]
@@ -1716,8 +1732,8 @@
a = list(aa)
# create 'match' that includes the files to be recommitted.
- # apply matchfn via repo.status to ensure correct case handling.
- cm, ca, cr, cd = repo.status(patchparent, match=matchfn)[:4]
+ # apply match1 via repo.status to ensure correct case handling.
+ cm, ca, cr, cd = repo.status(patchparent, match=match1)[:4]
allmatches = set(cm + ca + cr + cd)
refreshchanges = [x.intersection(allmatches) for x in (mm, aa, dd)]
@@ -1767,7 +1783,7 @@
# file with mtime=0 so status can see it.
mm = []
for i in xrange(len(m) - 1, -1, -1):
- if not matchfn(m[i]):
+ if not match1(m[i]):
mm.append(m[i])
del m[i]
for f in m:
@@ -2151,7 +2167,7 @@
self.added.append(patchname)
imported.append(patchname)
patchname = None
- if rev and repo.ui.configbool('mq', 'secret', False):
+ if rev and repo.ui.configbool('mq', 'secret'):
# if we added anything with --rev, move the secret root
phases.retractboundary(repo, tr, phases.secret, [n])
self.parseseries()
@@ -3410,7 +3426,7 @@
def mqphasedefaults(repo, roots):
"""callback used to set mq changeset as secret when no phase data exists"""
if repo.mq.applied:
- if repo.ui.configbool('mq', 'secret', False):
+ if repo.ui.configbool('mq', 'secret'):
mqphase = phases.secret
else:
mqphase = phases.draft
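
The mq.git handling above registers 'auto' as the default and then classifies the raw string with util.parsebool, which maps boolean spellings to True/False and everything else to None, replacing the old configbool-plus-ConfigError dance. A small sketch of that classification, with parsebool reimplemented so the example stands alone:

    def parsebool(s):
        # Stand-in for mercurial.util.parsebool: boolean spellings map to
        # True/False, anything else (such as 'auto') maps to None.
        return {'1': True, 'yes': True, 'true': True, 'on': True,
                'always': True,
                '0': False, 'no': False, 'false': False, 'off': False,
                'never': False}.get(s.lower())

    def gitmode(configvalue):
        # Same shape as the new mq handling: boolean values collapse to
        # 'yes'/'no', everything else is passed through unchanged.
        mode = configvalue.lower()
        boolmode = parsebool(mode)
        if boolmode is not None:
            mode = 'yes' if boolmode else 'no'
        return mode

    assert gitmode('True') == 'yes'
    assert gitmode('off') == 'no'
    assert gitmode('auto') == 'auto'    # non-boolean values pass through
    assert gitmode('keep') == 'keep'
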
--- a/hgext/notify.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/notify.py Sat Sep 30 07:52:48 2017 -0700
@@ -145,6 +145,7 @@
error,
mail,
patch,
+ registrar,
util,
)
@@ -154,6 +155,49 @@
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'
+configtable = {}
+configitem = registrar.configitem(configtable)
+
+configitem('notify', 'config',
+ default=None,
+)
+configitem('notify', 'diffstat',
+ default=True,
+)
+configitem('notify', 'domain',
+ default=None,
+)
+configitem('notify', 'fromauthor',
+ default=None,
+)
+configitem('notify', 'maxdiff',
+ default=300,
+)
+configitem('notify', 'maxsubject',
+ default=67,
+)
+configitem('notify', 'mbox',
+ default=None,
+)
+configitem('notify', 'merge',
+ default=True,
+)
+configitem('notify', 'sources',
+ default='serve',
+)
+configitem('notify', 'strip',
+ default=0,
+)
+configitem('notify', 'style',
+ default=None,
+)
+configitem('notify', 'template',
+ default=None,
+)
+configitem('notify', 'test',
+ default=True,
+)
+
# template for single changeset can include email headers.
single_template = '''
Subject: changeset in {webroot}: {desc|firstline|strip}
@@ -187,14 +231,14 @@
if cfg:
self.ui.readconfig(cfg, sections=['usersubs', 'reposubs'])
self.repo = repo
- self.stripcount = int(self.ui.config('notify', 'strip', 0))
+ self.stripcount = int(self.ui.config('notify', 'strip'))
self.root = self.strip(self.repo.root)
self.domain = self.ui.config('notify', 'domain')
self.mbox = self.ui.config('notify', 'mbox')
- self.test = self.ui.configbool('notify', 'test', True)
+ self.test = self.ui.configbool('notify', 'test')
self.charsets = mail._charsets(self.ui)
self.subs = self.subscribers()
- self.merge = self.ui.configbool('notify', 'merge', True)
+ self.merge = self.ui.configbool('notify', 'merge')
mapfile = None
template = (self.ui.config('notify', hooktype) or
@@ -265,7 +309,7 @@
def skipsource(self, source):
'''true if incoming changes from this source should be skipped.'''
- ok_sources = self.ui.config('notify', 'sources', 'serve').split()
+ ok_sources = self.ui.config('notify', 'sources').split()
return source not in ok_sources
def send(self, ctx, count, data):
@@ -316,7 +360,7 @@
else:
s = ctx.description().lstrip().split('\n', 1)[0].rstrip()
subject = '%s: %s' % (self.root, s)
- maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
+ maxsubject = int(self.ui.config('notify', 'maxsubject'))
if maxsubject:
subject = util.ellipsis(subject, maxsubject)
msg['Subject'] = mail.headencode(self.ui, subject,
@@ -350,7 +394,7 @@
def diff(self, ctx, ref=None):
- maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
+ maxdiff = int(self.ui.config('notify', 'maxdiff'))
prev = ctx.p1().node()
if ref:
ref = ref.node()
@@ -360,7 +404,7 @@
opts=patch.diffallopts(self.ui))
difflines = ''.join(chunks).splitlines()
- if self.ui.configbool('notify', 'diffstat', True):
+ if self.ui.configbool('notify', 'diffstat'):
s = patch.diffstat(difflines)
# s may be nil, don't include the header if it is
if s:
--- a/hgext/patchbomb.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/patchbomb.py Sat Sep 30 07:52:48 2017 -0700
@@ -99,6 +99,29 @@
cmdtable = {}
command = registrar.command(cmdtable)
+
+configtable = {}
+configitem = registrar.configitem(configtable)
+
+configitem('patchbomb', 'bundletype',
+ default=None,
+)
+configitem('patchbomb', 'confirm',
+ default=False,
+)
+configitem('patchbomb', 'flagtemplate',
+ default=None,
+)
+configitem('patchbomb', 'from',
+ default=None,
+)
+configitem('patchbomb', 'intro',
+ default='auto',
+)
+configitem('patchbomb', 'publicurl',
+ default=None,
+)
+
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -134,7 +157,7 @@
def introwanted(ui, opts, number):
'''is an introductory message apparently wanted?'''
- introconfig = ui.config('patchbomb', 'intro', 'auto')
+ introconfig = ui.config('patchbomb', 'intro')
if opts.get('intro') or opts.get('desc'):
intro = True
elif introconfig == 'always':
@@ -308,7 +331,8 @@
else:
ui.write(_('\nWrite the introductory message for the '
'patch series.\n\n'))
- body = ui.edit(defaultbody, sender, repopath=repo.path)
+ body = ui.edit(defaultbody, sender, repopath=repo.path,
+ action='patchbombbody')
# Save series description in case sendmail fails
msgfile = repo.vfs('last-email.txt', 'wb')
msgfile.write(body)
--- a/hgext/rebase.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/rebase.py Sat Sep 30 07:52:48 2017 -0700
@@ -21,7 +21,6 @@
from mercurial.i18n import _
from mercurial.node import (
- hex,
nullid,
nullrev,
short,
@@ -45,8 +44,8 @@
phases,
registrar,
repair,
- repoview,
revset,
+ revsetlang,
scmutil,
smartset,
util,
@@ -60,13 +59,11 @@
# Indicates that a revision needs to be rebased
revtodo = -1
-nullmerge = -2
-revignored = -3
-# successor in rebase destination
-revprecursor = -4
-# plain prune (no successor)
-revpruned = -5
-revskipped = (revignored, revprecursor, revpruned)
+revtodostr = '-1'
+
+# legacy revstates no longer needed in current code
+# -2: nullmerge, -3: revignored, -4: revprecursor, -5: revpruned
+legacystates = {'-2', '-3', '-4', '-5'}
cmdtable = {}
command = registrar.command(cmdtable)
@@ -123,13 +120,37 @@
sourceset = revset.getset(repo, smartset.fullreposet(repo), x)
return subset & smartset.baseset([_destrebase(repo, sourceset)])
+def _ctxdesc(ctx):
+ """short description for a context"""
+ desc = '%d:%s "%s"' % (ctx.rev(), ctx,
+ ctx.description().split('\n', 1)[0])
+ repo = ctx.repo()
+ names = []
+ for nsname, ns in repo.names.iteritems():
+ if nsname == 'branches':
+ continue
+ names.extend(ns.names(repo, ctx.node()))
+ if names:
+ desc += ' (%s)' % ' '.join(names)
+ return desc
+
class rebaseruntime(object):
"""This class is a container for rebase runtime state"""
def __init__(self, repo, ui, opts=None):
if opts is None:
opts = {}
- self.repo = repo
+ # prepared: whether we have rebasestate prepared or not. Currently it
+ # decides whether "self.repo" is unfiltered or not.
+ # The rebasestate has explicit hash to hash instructions not depending
+ # on visibility. If rebasestate exists (in-memory or on-disk), use
+ # unfiltered repo to avoid visibility issues.
+ # Before knowing rebasestate (i.e. when starting a new rebase (not
+ # --continue or --abort)), the original repo should be used so
+ # visibility-dependent revsets are correct.
+ self.prepared = False
+ self._repo = repo
+
self.ui = ui
self.opts = opts
self.originalwd = None
@@ -139,9 +160,8 @@
# dict will be what contains most of the rebase progress state.
self.state = {}
self.activebookmark = None
- self.dest = None
+ self.destmap = {}
self.skipped = set()
- self.destancestors = set()
self.collapsef = opts.get('collapse', False)
self.collapsemsg = cmdutil.logmessage(ui, opts)
@@ -159,6 +179,13 @@
self.keepopen = opts.get('keepopen', False)
self.obsoletenotrebased = {}
+ @property
+ def repo(self):
+ if self.prepared:
+ return self._repo.unfiltered()
+ else:
+ return self._repo
+
def storestatus(self, tr=None):
"""Store the current status to allow recovery"""
if tr:
@@ -169,36 +196,39 @@
self._writestatus(f)
def _writestatus(self, f):
- repo = self.repo.unfiltered()
+ repo = self.repo
+ assert repo.filtername is None
f.write(repo[self.originalwd].hex() + '\n')
- f.write(repo[self.dest].hex() + '\n')
+ # was "dest". we now write dest per src root below.
+ f.write('\n')
f.write(repo[self.external].hex() + '\n')
f.write('%d\n' % int(self.collapsef))
f.write('%d\n' % int(self.keepf))
f.write('%d\n' % int(self.keepbranchesf))
f.write('%s\n' % (self.activebookmark or ''))
+ destmap = self.destmap
for d, v in self.state.iteritems():
oldrev = repo[d].hex()
if v >= 0:
newrev = repo[v].hex()
- elif v == revtodo:
- # To maintain format compatibility, we have to use nullid.
- # Please do remove this special case when upgrading the format.
- newrev = hex(nullid)
else:
newrev = v
- f.write("%s:%s\n" % (oldrev, newrev))
+ destnode = repo[destmap[d]].hex()
+ f.write("%s:%s:%s\n" % (oldrev, newrev, destnode))
repo.ui.debug('rebase status stored\n')
def restorestatus(self):
"""Restore a previously stored status"""
+ self.prepared = True
repo = self.repo
+ assert repo.filtername is None
keepbranches = None
- dest = None
+ legacydest = None
collapse = False
external = nullrev
activebookmark = None
state = {}
+ destmap = {}
try:
f = repo.vfs("rebasestate")
@@ -206,7 +236,10 @@
if i == 0:
originalwd = repo[l].rev()
elif i == 1:
- dest = repo[l].rev()
+                    # this line should be empty in newer versions, but legacy
+                    # clients may still use it
+ if l:
+ legacydest = repo[l].rev()
elif i == 2:
external = repo[l].rev()
elif i == 3:
@@ -221,11 +254,17 @@
# oldrev:newrev lines
activebookmark = l
else:
- oldrev, newrev = l.split(':')
- if newrev in (str(nullmerge), str(revignored),
- str(revprecursor), str(revpruned)):
- state[repo[oldrev].rev()] = int(newrev)
- elif newrev == nullid:
+ args = l.split(':')
+ oldrev = args[0]
+ newrev = args[1]
+ if newrev in legacystates:
+ continue
+ if len(args) > 2:
+ destnode = args[2]
+ else:
+ destnode = legacydest
+ destmap[repo[oldrev].rev()] = repo[destnode].rev()
+ if newrev in (nullid, revtodostr):
state[repo[oldrev].rev()] = revtodo
# Legacy compat special case
else:
@@ -242,7 +281,7 @@
skipped = set()
# recompute the set of skipped revs
if not collapse:
- seen = {dest}
+ seen = set(destmap.values())
for old, new in sorted(state.items()):
if new != revtodo and new in seen:
skipped.add(old)
@@ -250,10 +289,9 @@
repo.ui.debug('computed skipped revs: %s\n' %
(' '.join(str(r) for r in sorted(skipped)) or None))
repo.ui.debug('rebase status resumed\n')
- _setrebasesetvisibility(repo, set(state.keys()) | {originalwd})
self.originalwd = originalwd
- self.dest = dest
+ self.destmap = destmap
self.state = state
self.skipped = skipped
self.collapsef = collapse
@@ -262,23 +300,21 @@
self.external = external
self.activebookmark = activebookmark
- def _handleskippingobsolete(self, rebaserevs, obsoleterevs, dest):
+ def _handleskippingobsolete(self, obsoleterevs, destmap):
"""Compute structures necessary for skipping obsolete revisions
- rebaserevs: iterable of all revisions that are to be rebased
obsoleterevs: iterable of all obsolete revisions in rebaseset
- dest: a destination revision for the rebase operation
+ destmap: {srcrev: destrev} destination revisions
"""
self.obsoletenotrebased = {}
if not self.ui.configbool('experimental', 'rebaseskipobsolete',
default=True):
return
- rebaseset = set(rebaserevs)
obsoleteset = set(obsoleterevs)
self.obsoletenotrebased = _computeobsoletenotrebased(self.repo,
- obsoleteset, dest)
+ obsoleteset, destmap)
skippedset = set(self.obsoletenotrebased)
- _checkobsrebase(self.repo, self.ui, obsoleteset, rebaseset, skippedset)
+ _checkobsrebase(self.repo, self.ui, obsoleteset, skippedset)
def _prepareabortorcontinue(self, isabort):
try:
@@ -296,16 +332,14 @@
hint = _('use "hg rebase --abort" to clear broken state')
raise error.Abort(msg, hint=hint)
if isabort:
- return abort(self.repo, self.originalwd, self.dest,
+ return abort(self.repo, self.originalwd, self.destmap,
self.state, activebookmark=self.activebookmark)
- obsrevs = (r for r, st in self.state.items() if st == revprecursor)
- self._handleskippingobsolete(self.state.keys(), obsrevs, self.dest)
-
- def _preparenewrebase(self, dest, rebaseset):
- if dest is None:
+ def _preparenewrebase(self, destmap):
+ if not destmap:
return _nothingtorebase()
+ rebaseset = destmap.keys()
allowunstable = obsolete.isenabled(self.repo, obsolete.allowunstableopt)
if (not (self.keepf or allowunstable)
and self.repo.revs('first(children(%ld) - %ld)',
@@ -315,11 +349,7 @@
" unrebased descendants"),
hint=_('use --keep to keep original changesets'))
- obsrevs = _filterobsoleterevs(self.repo, set(rebaseset))
- self._handleskippingobsolete(rebaseset, obsrevs, dest.rev())
-
- result = buildstate(self.repo, dest, rebaseset, self.collapsef,
- self.obsoletenotrebased)
+ result = buildstate(self.repo, destmap, self.collapsef)
if not result:
# Empty state built, nothing to rebase
@@ -332,19 +362,26 @@
% root,
hint=_("see 'hg help phases' for details"))
- (self.originalwd, self.dest, self.state) = result
+ (self.originalwd, self.destmap, self.state) = result
if self.collapsef:
- self.destancestors = self.repo.changelog.ancestors(
- [self.dest],
- inclusive=True)
- self.external = externalparent(self.repo, self.state,
- self.destancestors)
+ dests = set(self.destmap.values())
+ if len(dests) != 1:
+ raise error.Abort(
+ _('--collapse does not work with multiple destinations'))
+ destrev = next(iter(dests))
+ destancestors = self.repo.changelog.ancestors([destrev],
+ inclusive=True)
+ self.external = externalparent(self.repo, self.state, destancestors)
- if dest.closesbranch() and not self.keepbranchesf:
- self.ui.status(_('reopening closed branch head %s\n') % dest)
+ for destrev in sorted(set(destmap.values())):
+ dest = self.repo[destrev]
+ if dest.closesbranch() and not self.keepbranchesf:
+ self.ui.status(_('reopening closed branch head %s\n') % dest)
+
+ self.prepared = True
def _performrebase(self, tr):
- repo, ui, opts = self.repo, self.ui, self.opts
+ repo, ui = self.repo, self.ui
if self.keepbranchesf:
# insert _savebranch at the start of extrafns so if
# there's a user-provided extrafn it can clobber branch if
@@ -358,10 +395,9 @@
raise error.Abort(_('cannot collapse multiple named '
'branches'))
- # Rebase
- if not self.destancestors:
- self.destancestors = repo.changelog.ancestors([self.dest],
- inclusive=True)
+ # Calculate self.obsoletenotrebased
+ obsrevs = _filterobsoleterevs(self.repo, self.state)
+ self._handleskippingobsolete(obsrevs, self.destmap)
# Keep track of the active bookmarks in order to reset them later
self.activebookmark = self.activebookmark or repo._activebookmark
@@ -372,27 +408,47 @@
# if we fail before the transaction closes.
self.storestatus()
- sortedrevs = repo.revs('sort(%ld, -topo)', self.state)
cands = [k for k, v in self.state.iteritems() if v == revtodo]
total = len(cands)
pos = 0
+ for subset in sortsource(self.destmap):
+ pos = self._performrebasesubset(tr, subset, pos, total)
+ ui.progress(_('rebasing'), None)
+ ui.note(_('rebase merging completed\n'))
+
+ def _performrebasesubset(self, tr, subset, pos, total):
+ repo, ui, opts = self.repo, self.ui, self.opts
+ sortedrevs = repo.revs('sort(%ld, -topo)', subset)
for rev in sortedrevs:
+ dest = self.destmap[rev]
ctx = repo[rev]
- desc = '%d:%s "%s"' % (ctx.rev(), ctx,
- ctx.description().split('\n', 1)[0])
- names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
- if names:
- desc += ' (%s)' % ' '.join(names)
+ desc = _ctxdesc(ctx)
if self.state[rev] == rev:
ui.status(_('already rebased %s\n') % desc)
+ elif rev in self.obsoletenotrebased:
+ succ = self.obsoletenotrebased[rev]
+ if succ is None:
+ msg = _('note: not rebasing %s, it has no '
+ 'successor\n') % desc
+ else:
+ succdesc = _ctxdesc(repo[succ])
+ msg = (_('note: not rebasing %s, already in '
+ 'destination as %s\n') % (desc, succdesc))
+ repo.ui.status(msg)
+ # Make clearrebased aware state[rev] is not a true successor
+ self.skipped.add(rev)
+ # Record rev as moved to its desired destination in self.state.
+ # This helps bookmark and working parent movement.
+ dest = max(adjustdest(repo, rev, self.destmap, self.state,
+ self.skipped))
+ self.state[rev] = dest
elif self.state[rev] == revtodo:
pos += 1
ui.status(_('rebasing %s\n') % desc)
ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, ctx)),
_('changesets'), total)
- p1, p2, base = defineparents(repo, rev, self.dest,
- self.state,
- self.destancestors,
+ p1, p2, base = defineparents(repo, rev, self.destmap,
+ self.state, self.skipped,
self.obsoletenotrebased)
self.storestatus(tr=tr)
storecollapsemsg(repo, self.collapsemsg)
@@ -403,7 +459,7 @@
ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
'rebase')
stats = rebasenode(repo, rev, p1, base, self.state,
- self.collapsef, self.dest)
+ self.collapsef, dest)
if stats and stats[3] > 0:
raise error.InterventionRequired(
_('unresolved conflicts (see hg '
@@ -439,32 +495,16 @@
self.skipped.add(rev)
self.state[rev] = p1
ui.debug('next revision set to %s\n' % p1)
- elif self.state[rev] == nullmerge:
- ui.debug('ignoring null merge rebase of %s\n' % rev)
- elif self.state[rev] == revignored:
- ui.status(_('not rebasing ignored %s\n') % desc)
- elif self.state[rev] == revprecursor:
- destctx = repo[self.obsoletenotrebased[rev]]
- descdest = '%d:%s "%s"' % (destctx.rev(), destctx,
- destctx.description().split('\n', 1)[0])
- msg = _('note: not rebasing %s, already in destination as %s\n')
- ui.status(msg % (desc, descdest))
- elif self.state[rev] == revpruned:
- msg = _('note: not rebasing %s, it has no successor\n')
- ui.status(msg % desc)
else:
ui.status(_('already rebased %s as %s\n') %
(desc, repo[self.state[rev]]))
-
- ui.progress(_('rebasing'), None)
- ui.note(_('rebase merging completed\n'))
+ return pos
def _finishrebase(self):
repo, ui, opts = self.repo, self.ui, self.opts
if self.collapsef and not self.keepopen:
- p1, p2, _base = defineparents(repo, min(self.state),
- self.dest, self.state,
- self.destancestors,
+ p1, p2, _base = defineparents(repo, min(self.state), self.destmap,
+ self.state, self.skipped,
self.obsoletenotrebased)
editopt = opts.get('edit')
editform = 'rebase.collapse'
@@ -473,24 +513,25 @@
else:
commitmsg = 'Collapsed revision'
for rebased in sorted(self.state):
- if rebased not in self.skipped and\
- self.state[rebased] > nullmerge:
+ if rebased not in self.skipped:
commitmsg += '\n* %s' % repo[rebased].description()
editopt = True
editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
revtoreuse = max(self.state)
- newnode = concludenode(repo, revtoreuse, p1, self.external,
- commitmsg=commitmsg,
- extrafn=_makeextrafn(self.extrafns),
- editor=editor,
- keepbranches=self.keepbranchesf,
- date=self.date)
- if newnode is None:
- newrev = self.dest
- else:
+
+ dsguard = None
+ if ui.configbool('rebase', 'singletransaction'):
+ dsguard = dirstateguard.dirstateguard(repo, 'rebase')
+ with util.acceptintervention(dsguard):
+ newnode = concludenode(repo, revtoreuse, p1, self.external,
+ commitmsg=commitmsg,
+ extrafn=_makeextrafn(self.extrafns),
+ editor=editor,
+ keepbranches=self.keepbranchesf,
+ date=self.date)
+ if newnode is not None:
newrev = repo[newnode].rev()
- for oldrev in self.state.iterkeys():
- if self.state[oldrev] > nullmerge:
+ for oldrev in self.state.iterkeys():
self.state[oldrev] = newrev
if 'qtip' in repo.tags():
@@ -499,9 +540,7 @@
# restore original working directory
# (we do this before stripping)
newwd = self.state.get(self.originalwd, self.originalwd)
- if newwd == revprecursor:
- newwd = self.obsoletenotrebased[self.originalwd]
- elif newwd < 0:
+ if newwd < 0:
# original directory is a parent of rebase set root or ignored
newwd = self.originalwd
if newwd not in [c.rev() for c in repo[None].parents()]:
@@ -512,7 +551,7 @@
if not self.keepf:
if self.collapsef:
collapsedas = newnode
- clearrebased(ui, repo, self.dest, self.state, self.skipped,
+ clearrebased(ui, repo, self.destmap, self.state, self.skipped,
collapsedas, self.keepf)
clearstatus(repo)
@@ -704,24 +743,29 @@
if retcode is not None:
return retcode
else:
- dest, rebaseset = _definesets(ui, repo, destf, srcf, basef, revf,
- destspace=destspace)
- retcode = rbsrt._preparenewrebase(dest, rebaseset)
+ destmap = _definedestmap(ui, repo, destf, srcf, basef, revf,
+ destspace=destspace)
+ retcode = rbsrt._preparenewrebase(destmap)
if retcode is not None:
return retcode
tr = None
- if ui.configbool('rebase', 'singletransaction'):
+ dsguard = None
+
+ singletr = ui.configbool('rebase', 'singletransaction')
+ if singletr:
tr = repo.transaction('rebase')
with util.acceptintervention(tr):
- rbsrt._performrebase(tr)
+ if singletr:
+ dsguard = dirstateguard.dirstateguard(repo, 'rebase')
+ with util.acceptintervention(dsguard):
+ rbsrt._performrebase(tr)
rbsrt._finishrebase()
-def _definesets(ui, repo, destf=None, srcf=None, basef=None, revf=None,
- destspace=None):
- """use revisions argument to define destination and rebase set
- """
+def _definedestmap(ui, repo, destf=None, srcf=None, basef=None, revf=None,
+ destspace=None):
+ """use revisions argument to define destmap {srcrev: destrev}"""
if revf is None:
revf = []
@@ -741,19 +785,18 @@
raise error.Abort(_('you must specify a destination'),
hint=_('use: hg rebase -d REV'))
- if destf:
- dest = scmutil.revsingle(repo, destf)
+ dest = None
if revf:
rebaseset = scmutil.revrange(repo, revf)
if not rebaseset:
ui.status(_('empty "rev" revision set - nothing to rebase\n'))
- return None, None
+ return None
elif srcf:
src = scmutil.revrange(repo, [srcf])
if not src:
ui.status(_('empty "source" revision set - nothing to rebase\n'))
- return None, None
+ return None
rebaseset = repo.revs('(%ld)::', src)
assert rebaseset
else:
@@ -761,8 +804,11 @@
if not base:
ui.status(_('empty "base" revision set - '
"can't compute rebase set\n"))
- return None, None
- if not destf:
+ return None
+ if destf:
+ # --base does not support multiple destinations
+ dest = scmutil.revsingle(repo, destf)
+ else:
dest = repo[_destrebase(repo, base, destspace=destspace)]
destf = str(dest)
@@ -805,13 +851,48 @@
else: # can it happen?
ui.status(_('nothing to rebase from %s to %s\n') %
('+'.join(str(repo[r]) for r in base), dest))
- return None, None
+ return None
if not destf:
dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
destf = str(dest)
- return dest, rebaseset
+ allsrc = revsetlang.formatspec('%ld', rebaseset)
+ alias = {'ALLSRC': allsrc}
+
+ if dest is None:
+ try:
+ # fast path: try to resolve dest without SRC alias
+ dest = scmutil.revsingle(repo, destf, localalias=alias)
+ except error.RepoLookupError:
+ if not ui.configbool('experimental', 'rebase.multidest'):
+ raise
+ # multi-dest path: resolve dest for each SRC separately
+ destmap = {}
+ for r in rebaseset:
+ alias['SRC'] = revsetlang.formatspec('%d', r)
+ # use repo.anyrevs instead of scmutil.revsingle because we
+ # don't want to abort if destset is empty.
+ destset = repo.anyrevs([destf], user=True, localalias=alias)
+ size = len(destset)
+ if size == 1:
+ destmap[r] = destset.first()
+ elif size == 0:
+ ui.note(_('skipping %s - empty destination\n') % repo[r])
+ else:
+ raise error.Abort(_('rebase destination for %s is not '
+ 'unique') % repo[r])
+
+ if dest is not None:
+ # single-dest case: assign dest to each rev in rebaseset
+ destrev = dest.rev()
+ destmap = {r: destrev for r in rebaseset} # {srcrev: destrev}
+
+ if not destmap:
+ ui.status(_('nothing to rebase - empty destination\n'))
+ return None
+
+ return destmap
def externalparent(repo, state, destancestors):
"""Return the revision that should be used as the second parent
@@ -841,8 +922,10 @@
'''Commit the wd changes with parents p1 and p2. Reuse commit info from rev
but also store useful information in extra.
Return node of committed revision.'''
- dsguard = dirstateguard.dirstateguard(repo, 'rebase')
- try:
+ dsguard = util.nullcontextmanager()
+ if not repo.ui.configbool('rebase', 'singletransaction'):
+ dsguard = dirstateguard.dirstateguard(repo, 'rebase')
+ with dsguard:
repo.setparents(repo[p1].node(), repo[p2].node())
ctx = repo[rev]
if commitmsg is None:
@@ -864,10 +947,7 @@
date=date, extra=extra, editor=editor)
repo.dirstate.setbranch(repo[newnode].branch())
- dsguard.close()
return newnode
- finally:
- release(dsguard)
def rebasenode(repo, rev, p1, base, state, collapse, dest):
'Rebase a single revision rev on top of p1 using base as merge ancestor'
@@ -898,15 +978,15 @@
copies.duplicatecopies(repo, rev, p1rev, skiprev=dest)
return stats
-def adjustdest(repo, rev, dest, state):
+def adjustdest(repo, rev, destmap, state, skipped):
"""adjust rebase destination given the current rebase state
rev is what is being rebased. Return a list of two revs, which are the
adjusted destinations for rev's p1 and p2, respectively. If a parent is
nullrev, return dest without adjustment for it.
- For example, when doing rebase -r B+E -d F, rebase will first move B to B1,
- and E's destination will be adjusted from F to B1.
+ For example, when doing rebasing B+E to F, C to G, rebase will first move B
+ to B1, and E's destination will be adjusted from F to B1.
B1 <- written during rebasing B
|
@@ -918,11 +998,11 @@
| |
| x <- skipped, ex. no successor or successor in (::dest)
| |
- | C
+ | C <- rebased as C', different destination
| |
- | B <- rebased as B1
- |/
- A
+ | B <- rebased as B1 C'
+ |/ |
+ A G <- destination of C, different
Another example about merge changeset, rebase -r C+G+H -d K, rebase will
first move C to C1, G to G1, and when it's checking H, the adjusted
@@ -937,34 +1017,45 @@
| B | ...
|/ |/
A A
+
+ Besides, adjust dest according to existing rebase information. For example,
+
+ B C D B needs to be rebased on top of C, C needs to be rebased on top
+ \|/ of D. We will rebase C first.
+ A
+
+ C' After rebasing C, when considering B's destination, use C'
+ | instead of the original C.
+ B D
+ \ /
+ A
"""
+ # pick already rebased revs with same dest from state as interesting source
+ dest = destmap[rev]
+ source = [s for s, d in state.items()
+ if d > 0 and destmap[s] == dest and s not in skipped]
+
result = []
for prev in repo.changelog.parentrevs(rev):
adjusted = dest
if prev != nullrev:
- # pick already rebased revs from state
- source = [s for s, d in state.items() if d > 0]
candidate = repo.revs('max(%ld and (::%d))', source, prev).first()
if candidate is not None:
adjusted = state[candidate]
+ if adjusted == dest and dest in state:
+ adjusted = state[dest]
+ if adjusted == revtodo:
+ # sortsource should produce an order that makes this impossible
+ raise error.ProgrammingError(
+ 'rev %d should be rebased already at this time' % dest)
result.append(adjusted)
return result
-def nearestrebased(repo, rev, state):
- """return the nearest ancestors of rev in the rebase result"""
- rebased = [r for r in state if state[r] > nullmerge]
- candidates = repo.revs('max(%ld and (::%d))', rebased, rev)
- if candidates:
- return state[candidates.first()]
- else:
- return None
-
-def _checkobsrebase(repo, ui, rebaseobsrevs, rebasesetrevs, rebaseobsskipped):
+def _checkobsrebase(repo, ui, rebaseobsrevs, rebaseobsskipped):
"""
Abort if rebase will create divergence or rebase is noop because of markers
`rebaseobsrevs`: set of obsolete revision in source
- `rebasesetrevs`: set of revisions to be rebased from source
`rebaseobsskipped`: set of revisions from source skipped because they have
successors in destination
"""
@@ -982,107 +1073,202 @@
"experimental.allowdivergence=True")
raise error.Abort(msg % (",".join(divhashes),), hint=h)
-def defineparents(repo, rev, dest, state, destancestors,
- obsoletenotrebased):
- 'Return the new parent relationship of the revision that will be rebased'
- parents = repo[rev].parents()
- p1 = p2 = nullrev
- rp1 = None
+def successorrevs(unfi, rev):
+ """yield revision numbers for successors of rev"""
+ assert unfi.filtername is None
+ nodemap = unfi.changelog.nodemap
+ for s in obsutil.allsuccessors(unfi.obsstore, [unfi[rev].node()]):
+ if s in nodemap:
+ yield nodemap[s]
+
+def defineparents(repo, rev, destmap, state, skipped, obsskipped):
+ """Return new parents and optionally a merge base for rev being rebased
+
+ The destination specified by "dest" cannot always be used directly because
+    the result of a previous rebase could affect the destination. For example,
+
+ D E rebase -r C+D+E -d B
+ |/ C will be rebased to C'
+ B C D's new destination will be C' instead of B
+ |/ E's new destination will be C' instead of B
+ A
- p1n = parents[0].rev()
- if p1n in destancestors:
- p1 = dest
- elif p1n in state:
- if state[p1n] == nullmerge:
- p1 = dest
- elif state[p1n] in revskipped:
- p1 = nearestrebased(repo, p1n, state)
- if p1 is None:
- p1 = dest
- else:
- p1 = state[p1n]
- else: # p1n external
- p1 = dest
- p2 = p1n
+    The new parents of a merge are slightly more complicated. See the comment
+ block below.
+ """
+ # use unfiltered changelog since successorrevs may return filtered nodes
+ assert repo.filtername is None
+ cl = repo.changelog
+ def isancestor(a, b):
+ # take revision numbers instead of nodes
+ if a == b:
+ return True
+ elif a > b:
+ return False
+ return cl.isancestor(cl.node(a), cl.node(b))
+
+ dest = destmap[rev]
+ oldps = repo.changelog.parentrevs(rev) # old parents
+ newps = [nullrev, nullrev] # new parents
+ dests = adjustdest(repo, rev, destmap, state, skipped)
+ bases = list(oldps) # merge base candidates, initially just old parents
- if len(parents) == 2 and parents[1].rev() not in destancestors:
- p2n = parents[1].rev()
- # interesting second parent
- if p2n in state:
- if p1 == dest: # p1n in destancestors or external
- p1 = state[p2n]
- if p1 == revprecursor:
- rp1 = obsoletenotrebased[p2n]
- elif state[p2n] in revskipped:
- p2 = nearestrebased(repo, p2n, state)
- if p2 is None:
- # no ancestors rebased yet, detach
- p2 = dest
- else:
- p2 = state[p2n]
- else: # p2n external
- if p2 != nullrev: # p1n external too => rev is a merged revision
- raise error.Abort(_('cannot use revision %d as base, result '
- 'would have 3 parents') % rev)
- p2 = p2n
- repo.ui.debug(" future parents are %d and %d\n" %
- (repo[rp1 or p1].rev(), repo[p2].rev()))
+ if all(r == nullrev for r in oldps[1:]):
+ # For non-merge changeset, just move p to adjusted dest as requested.
+ newps[0] = dests[0]
+ else:
+ # For merge changeset, if we move p to dests[i] unconditionally, both
+ # parents may change and the end result looks like "the merge loses a
+ # parent", which is a surprise. This is a limit because "--dest" only
+ # accepts one dest per src.
+ #
+ # Therefore, only move p with reasonable conditions (in this order):
+ # 1. use dest, if dest is a descendent of (p or one of p's successors)
+ # 2. use p's rebased result, if p is rebased (state[p] > 0)
+ #
+ # Comparing with adjustdest, the logic here does some additional work:
+ # 1. decide which parents will not be moved towards dest
+ # 2. if the above decision is "no", should a parent still be moved
+ # because it was rebased?
+ #
+ # For example:
+ #
+ # C # "rebase -r C -d D" is an error since none of the parents
+ # /| # can be moved. "rebase -r B+C -d D" will move C's parent
+ # A B D # B (using rule "2."), since B will be rebased.
+ #
+ # The loop tries to be not rely on the fact that a Mercurial node has
+ # at most 2 parents.
+ for i, p in enumerate(oldps):
+ np = p # new parent
+ if any(isancestor(x, dests[i]) for x in successorrevs(repo, p)):
+ np = dests[i]
+ elif p in state and state[p] > 0:
+ np = state[p]
- if not any(p.rev() in state for p in parents):
- # Case (1) root changeset of a non-detaching rebase set.
- # Let the merge mechanism find the base itself.
+ # "bases" only record "special" merge bases that cannot be
+ # calculated from changelog DAG (i.e. isancestor(p, np) is False).
+ # For example:
+ #
+ # B' # rebase -s B -d D, when B was rebased to B'. dest for C
+ # | C # is B', but merge base for C is B, instead of
+ # D | # changelog.ancestor(C, B') == A. If changelog DAG and
+ # | B # "state" edges are merged (so there will be an edge from
+ # |/ # B to B'), the merge base is still ancestor(C, B') in
+ # A # the merged graph.
+ #
+ # Also see https://bz.mercurial-scm.org/show_bug.cgi?id=1950#c8
+ # which uses "virtual null merge" to explain this situation.
+ if isancestor(p, np):
+ bases[i] = nullrev
+
+ # If one parent becomes an ancestor of the other, drop the ancestor
+ for j, x in enumerate(newps[:i]):
+ if x == nullrev:
+ continue
+ if isancestor(np, x): # CASE-1
+ np = nullrev
+ elif isancestor(x, np): # CASE-2
+ newps[j] = np
+ np = nullrev
+ # New parents forming an ancestor relationship does not
+ # mean the old parents have a similar relationship. Do not
+ # set bases[x] to nullrev.
+ bases[j], bases[i] = bases[i], bases[j]
+
+ newps[i] = np
+
+ # "rebasenode" updates to new p1, and the old p1 will be used as merge
+ # base. If only p2 changes, merging using unchanged p1 as merge base is
+ # suboptimal. Therefore swap parents to make the merge sane.
+ if newps[1] != nullrev and oldps[0] == newps[0]:
+ assert len(newps) == 2 and len(oldps) == 2
+ newps.reverse()
+ bases.reverse()
+
+ # No parent change might be an error because we fail to make rev a
+ # descendant of the requested dest. This can happen, for example:
+ #
+ # C # rebase -r C -d D
+ # /| # None of A and B will be changed to D and rebase fails.
+ # A B D
+ if set(newps) == set(oldps) and dest not in newps:
+ raise error.Abort(_('cannot rebase %d:%s without '
+ 'moving at least one of its parents')
+ % (rev, repo[rev]))
+
+ # Source should not be ancestor of dest. The check here guarantees it's
+ # impossible. With multi-dest, the initial check does not cover complex
+ # cases since we don't have abstractions to dry-run rebase cheaply.
+ if any(p != nullrev and isancestor(rev, p) for p in newps):
+ raise error.Abort(_('source is ancestor of destination'))
+
+ # "rebasenode" updates to new p1, use the corresponding merge base.
+ if bases[0] != nullrev:
+ base = bases[0]
+ else:
base = None
- elif not repo[rev].p2():
- # Case (2) detaching the node with a single parent, use this parent
- base = repo[rev].p1().rev()
- else:
- # Assuming there is a p1, this is the case where there also is a p2.
- # We are thus rebasing a merge and need to pick the right merge base.
- #
- # Imagine we have:
- # - M: current rebase revision in this step
- # - A: one parent of M
- # - B: other parent of M
- # - D: destination of this merge step (p1 var)
- #
- # Consider the case where D is a descendant of A or B and the other is
- # 'outside'. In this case, the right merge base is the D ancestor.
- #
- # An informal proof, assuming A is 'outside' and B is the D ancestor:
- #
- # If we pick B as the base, the merge involves:
- # - changes from B to M (actual changeset payload)
- # - changes from B to D (induced by rebase) as D is a rebased
- # version of B)
- # Which exactly represent the rebase operation.
- #
- # If we pick A as the base, the merge involves:
- # - changes from A to M (actual changeset payload)
- # - changes from A to D (with include changes between unrelated A and B
- # plus changes induced by rebase)
- # Which does not represent anything sensible and creates a lot of
- # conflicts. A is thus not the right choice - B is.
- #
- # Note: The base found in this 'proof' is only correct in the specified
- # case. This base does not make sense if is not D a descendant of A or B
- # or if the other is not parent 'outside' (especially not if the other
- # parent has been rebased). The current implementation does not
- # make it feasible to consider different cases separately. In these
- # other cases we currently just leave it to the user to correctly
- # resolve an impossible merge using a wrong ancestor.
- #
- # xx, p1 could be -4, and both parents could probably be -4...
- for p in repo[rev].parents():
- if state.get(p.rev()) == p1:
- base = p.rev()
- break
- else: # fallback when base not found
- base = None
+
+ # Check if the merge will contain unwanted changes. That may happen if
+ # there are multiple special (non-changelog ancestor) merge bases, which
+ # cannot be handled well by the 3-way merge algorithm. For example:
+ #
+ # F
+ # /|
+ # D E # "rebase -r D+E+F -d Z", when rebasing F, if "D" was chosen
+ # | | # as merge base, the difference between D and F will include
+ # B C # C, so the rebased F will contain C surprisingly. If "E" was
+ # |/ # chosen, the rebased F will contain B.
+ # A Z
+ #
+ # But our merge base candidates (D and E in above case) could still be
+ # better than the default (ancestor(F, Z) == null). Therefore still
+ # pick one (so choose p1 above).
+ if sum(1 for b in bases if b != nullrev) > 1:
+ unwanted = [None, None] # unwanted[i]: unwanted revs if bases[i] is chosen
+ for i, base in enumerate(bases):
+ if base == nullrev:
+ continue
+ # Revisions in the side (not chosen as merge base) branch that
+ # might contain "surprising" contents
+ siderevs = list(repo.revs('((%ld-%d) %% (%d+%d))',
+ bases, base, base, dest))
- # Raise because this function is called wrong (see issue 4106)
- raise AssertionError('no base found to rebase on '
- '(defineparents called wrong)')
- return rp1 or p1, p2, base
+ # If those revisions are covered by rebaseset, the result is good.
+ # A merge in rebaseset would be considered to cover its ancestors.
+ if siderevs:
+ rebaseset = [r for r, d in state.items()
+ if d > 0 and r not in obsskipped]
+ merges = [r for r in rebaseset
+ if cl.parentrevs(r)[1] != nullrev]
+ unwanted[i] = list(repo.revs('%ld - (::%ld) - %ld',
+ siderevs, merges, rebaseset))
+
+ # Choose a merge base that has a minimal number of unwanted revs.
+ l, i = min((len(revs), i)
+ for i, revs in enumerate(unwanted) if revs is not None)
+ base = bases[i]
+
+ # newps[0] should match merge base if possible. Currently, if newps[i] is
+ # nullrev, the only case is that newps[i] and some newps[j] (j < i) were in
+ # an ancestor relationship. In that case, it's fine to not swap newps here.
+ # (see CASE-1 and CASE-2 above)
+ if i != 0 and newps[i] != nullrev:
+ newps[0], newps[i] = newps[i], newps[0]
+
+ # The merge will include unwanted revisions. Abort now. Revisit this if
+ # we have a more advanced merge algorithm that handles multiple bases.
+ if l > 0:
+ unwanteddesc = _(' or ').join(
+ (', '.join('%d:%s' % (r, repo[r]) for r in revs)
+ for revs in unwanted if revs is not None))
+ raise error.Abort(
+ _('rebasing %d:%s will include unwanted changes from %s')
+ % (rev, repo[rev], unwanteddesc))
+
+ repo.ui.debug(" future parents are %d and %d\n" % tuple(newps))
+
+ return newps[0], newps[1], base
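
The parent-moving rules above can be exercised without any Mercurial machinery. Below is a minimal, self-contained sketch of the two rules (move a parent to its adjusted destination only when the destination descends from it, otherwise follow the parent's rebased result). The toy graph, toy_isancestor and toy_newparents are invented for illustration and deliberately ignore successors and the merge-base bookkeeping of the real defineparents.

    # Toy illustration of the parent-moving rules in defineparents:
    #   1. move a parent p to dest only if dest descends from p
    #      (successor handling omitted for brevity)
    #   2. otherwise, if p itself was already rebased, follow its new location
    parents = {            # rev -> (p1, p2); -1 means null
        0: (-1, -1),       # A
        1: (0, -1),        # B
        2: (0, 1),         # C, a merge of A and B
        3: (0, -1),        # D, the destination
    }

    def toy_isancestor(a, b):
        """True if a is an ancestor of (or equal to) b in the toy graph."""
        if a == b:
            return True
        return any(p != -1 and toy_isancestor(a, p) for p in parents[b])

    def toy_newparents(rev, dest, state):
        """state maps already-rebased revs to their new location."""
        newps = []
        for p in parents[rev]:
            if p == -1:
                newps.append(-1)
            elif toy_isancestor(p, dest):        # rule 1
                newps.append(dest)
            elif p in state and state[p] > 0:    # rule 2
                newps.append(state[p])
            else:                                # parent stays where it is
                newps.append(p)
        return newps

    # "rebase -r C -d D": only the A-side parent can move, B stays.
    print(toy_newparents(2, 3, state={}))      # [3, 1]
    # "rebase -r B+C -d D": B was rebased to rev 4, so C follows it.
    print(toy_newparents(2, 3, state={1: 4}))  # [3, 4]
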
def isagitpatch(repo, patchname):
'Return true if the given patch is in git format'
@@ -1162,7 +1348,6 @@
def clearstatus(repo):
'Remove the status files'
- _clearrebasesetvisibiliy(repo)
# Make sure the active transaction won't write the state file
tr = repo.currenttransaction()
if tr:
@@ -1186,7 +1371,7 @@
return False
-def abort(repo, originalwd, dest, state, activebookmark=None):
+def abort(repo, originalwd, destmap, state, activebookmark=None):
'''Restore the repository to its original state. Additional args:
activebookmark: the name of the bookmark that should be active after the
@@ -1196,7 +1381,7 @@
# If the first commits in the rebased set get skipped during the rebase,
# their values within the state mapping will be the dest rev id. The
# dstates list must not contain the dest rev (issue4896)
- dstates = [s for s in state.values() if s >= 0 and s != dest]
+ dstates = [s for r, s in state.items() if s >= 0 and s != destmap[r]]
immutable = [d for d in dstates if not repo[d].mutable()]
cleanup = True
if immutable:
@@ -1215,13 +1400,14 @@
if cleanup:
shouldupdate = False
- rebased = filter(lambda x: x >= 0 and x != dest, state.values())
+ rebased = [s for r, s in state.items()
+ if s >= 0 and s != destmap[r]]
if rebased:
strippoints = [
c.node() for c in repo.set('roots(%ld)', rebased)]
updateifonnodes = set(rebased)
- updateifonnodes.add(dest)
+ updateifonnodes.update(destmap.values())
updateifonnodes.add(originalwd)
shouldupdate = repo['.'].rev() in updateifonnodes
@@ -1243,31 +1429,65 @@
repo.ui.warn(_('rebase aborted\n'))
return 0
-def buildstate(repo, dest, rebaseset, collapse, obsoletenotrebased):
+def sortsource(destmap):
+ """yield source revisions in an order that ensures each is only rebased once
+
+ If sources and destinations overlap, we should filter out revisions
+ depending on other revisions which haven't been rebased yet.
+
+ Yield a sorted list of revisions each time.
+
+ For example, when rebasing A onto B and B onto C, this function yields [B],
+ then [A], indicating B needs to be rebased first.
+
+ Raise if there is a cycle so the rebase is impossible.
+ """
+ srcset = set(destmap)
+ while srcset:
+ srclist = sorted(srcset)
+ result = []
+ for r in srclist:
+ if destmap[r] not in srcset:
+ result.append(r)
+ if not result:
+ raise error.Abort(_('source and destination form a cycle'))
+ srcset -= set(result)
+ yield result
+
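
Since sortsource only inspects the destination map, its batching behaviour is easy to demonstrate in isolation. Here is a minimal stand-alone sketch of the same idea (using a plain ValueError instead of error.Abort; the revision numbers are invented):

    def toy_sortsource(destmap):
        """Yield batches of source revs so that a rev is only yielded after
        the rev it is rebased onto, when that rev is itself a source."""
        srcset = set(destmap)
        while srcset:
            batch = [r for r in sorted(srcset) if destmap[r] not in srcset]
            if not batch:
                raise ValueError('source and destination form a cycle')
            srcset -= set(batch)
            yield batch

    # rebase 1 onto 2 and 2 onto 3: 2 must be rebased before 1
    print(list(toy_sortsource({1: 2, 2: 3})))  # [[2], [1]]
    # 1 -> 2 and 2 -> 1 is a cycle and is rejected
    try:
        list(toy_sortsource({1: 2, 2: 1}))
    except ValueError as exc:
        print(exc)                             # source and destination form a cycle
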
+def buildstate(repo, destmap, collapse):
'''Define which revisions are going to be rebased and where
repo: repo
- dest: context
- rebaseset: set of rev
+ destmap: {srcrev: destrev}
'''
+ rebaseset = destmap.keys()
originalwd = repo['.'].rev()
- _setrebasesetvisibility(repo, set(rebaseset) | {originalwd})
# This check isn't strictly necessary, since mq detects commits over an
# applied patch. But it prevents messing up the working directory when
# a partially completed rebase is blocked by mq.
- if 'qtip' in repo.tags() and (dest.node() in
- [s.node for s in repo.mq.applied]):
- raise error.Abort(_('cannot rebase onto an applied mq patch'))
+ if 'qtip' in repo.tags():
+ mqapplied = set(repo[s.node].rev() for s in repo.mq.applied)
+ if set(destmap.values()) & mqapplied:
+ raise error.Abort(_('cannot rebase onto an applied mq patch'))
- roots = list(repo.set('roots(%ld)', rebaseset))
+ # Get "cycle" error early by exhausting the generator.
+ sortedsrc = list(sortsource(destmap)) # a list of sorted revs
+ if not sortedsrc:
+ raise error.Abort(_('no matching revisions'))
+
+ # Only check the first batch of revisions to rebase, i.e. those that do not
+ # depend on other revisions in the rebase set. This means the "source is
+ # ancestor of destination" check for the second (and following) batches of
+ # revisions is not done here. We rely on "defineparents" to do that check.
+ roots = list(repo.set('roots(%ld)', sortedsrc[0]))
if not roots:
raise error.Abort(_('no matching revisions'))
roots.sort()
state = dict.fromkeys(rebaseset, revtodo)
- detachset = set()
- emptyrebase = True
+ emptyrebase = (len(sortedsrc) == 1)
for root in roots:
+ dest = repo[destmap[root.rev()]]
commonbase = root.ancestor(dest)
if commonbase == root:
raise error.Abort(_('source is ancestor of destination'))
@@ -1287,47 +1507,6 @@
emptyrebase = False
repo.ui.debug('rebase onto %s starting from %s\n' % (dest, root))
- # Rebase tries to turn <dest> into a parent of <root> while
- # preserving the number of parents of rebased changesets:
- #
- # - A changeset with a single parent will always be rebased as a
- # changeset with a single parent.
- #
- # - A merge will be rebased as merge unless its parents are both
- # ancestors of <dest> or are themselves in the rebased set and
- # pruned while rebased.
- #
- # If one parent of <root> is an ancestor of <dest>, the rebased
- # version of this parent will be <dest>. This is always true with
- # --base option.
- #
- # Otherwise, we need to *replace* the original parents with
- # <dest>. This "detaches" the rebased set from its former location
- # and rebases it onto <dest>. Changes introduced by ancestors of
- # <root> not common with <dest> (the detachset, marked as
- # nullmerge) are "removed" from the rebased changesets.
- #
- # - If <root> has a single parent, set it to <dest>.
- #
- # - If <root> is a merge, we cannot decide which parent to
- # replace, the rebase operation is not clearly defined.
- #
- # The table below sums up this behavior:
- #
- # +------------------+----------------------+-------------------------+
- # | | one parent | merge |
- # +------------------+----------------------+-------------------------+
- # | parent in | new parent is <dest> | parents in ::<dest> are |
- # | ::<dest> | | remapped to <dest> |
- # +------------------+----------------------+-------------------------+
- # | unrelated source | new parent is <dest> | ambiguous, abort |
- # +------------------+----------------------+-------------------------+
- #
- # The actual abort is handled by `defineparents`
- if len(root.parents()) <= 1:
- # ancestors of <root> not ancestors of <dest>
- detachset.update(repo.changelog.findmissingrevs([commonbase.rev()],
- [root.rev()]))
if emptyrebase:
return None
for rev in sorted(state):
@@ -1335,26 +1514,10 @@
# if all parents of this revision are done, then so is this revision
if parents and all((state.get(p) == p for p in parents)):
state[rev] = rev
- for r in detachset:
- if r not in state:
- state[r] = nullmerge
- if len(roots) > 1:
- # If we have multiple roots, we may have "hole" in the rebase set.
- # Rebase roots that descend from those "hole" should not be detached as
- # other root are. We use the special `revignored` to inform rebase that
- # the revision should be ignored but that `defineparents` should search
- # a rebase destination that make sense regarding rebased topology.
- rebasedomain = set(repo.revs('%ld::%ld', rebaseset, rebaseset))
- for ignored in set(rebasedomain) - set(rebaseset):
- state[ignored] = revignored
- for r in obsoletenotrebased:
- if obsoletenotrebased[r] is None:
- state[r] = revpruned
- else:
- state[r] = revprecursor
- return originalwd, dest.rev(), state
+ return originalwd, destmap, state
-def clearrebased(ui, repo, dest, state, skipped, collapsedas=None, keepf=False):
+def clearrebased(ui, repo, destmap, state, skipped, collapsedas=None,
+ keepf=False):
"""dispose of rebased revision at the end of the rebase
If `collapsedas` is not None, the rebase was a collapse whose result is the
@@ -1439,66 +1602,36 @@
return ret
-def _setrebasesetvisibility(repo, revs):
- """store the currently rebased set on the repo object
-
- This is used by another function to prevent rebased revision to because
- hidden (see issue4504)"""
- repo = repo.unfiltered()
- repo._rebaseset = revs
- # invalidate cache if visibility changes
- hiddens = repo.filteredrevcache.get('visible', set())
- if revs & hiddens:
- repo.invalidatevolatilesets()
-
-def _clearrebasesetvisibiliy(repo):
- """remove rebaseset data from the repo"""
- repo = repo.unfiltered()
- if '_rebaseset' in vars(repo):
- del repo._rebaseset
-
-def _rebasedvisible(orig, repo):
- """ensure rebased revs stay visible (see issue4504)"""
- blockers = orig(repo)
- blockers.update(getattr(repo, '_rebaseset', ()))
- return blockers
-
def _filterobsoleterevs(repo, revs):
"""returns a set of the obsolete revisions in revs"""
return set(r for r in revs if repo[r].obsolete())
-def _computeobsoletenotrebased(repo, rebaseobsrevs, dest):
+def _computeobsoletenotrebased(repo, rebaseobsrevs, destmap):
"""return a mapping obsolete => successor for all obsolete nodes to be
rebased that have a successor in the destination
obsolete => None entries in the mapping indicate nodes with no successor"""
obsoletenotrebased = {}
- # Build a mapping successor => obsolete nodes for the obsolete
- # nodes to be rebased
- allsuccessors = {}
+ assert repo.filtername is None
cl = repo.changelog
- for r in rebaseobsrevs:
- node = cl.node(r)
- for s in obsutil.allsuccessors(repo.obsstore, [node]):
- try:
- allsuccessors[cl.rev(s)] = cl.rev(node)
- except LookupError:
- pass
-
- if allsuccessors:
- # Look for successors of obsolete nodes to be rebased among
- # the ancestors of dest
- ancs = cl.ancestors([dest],
- stoprev=min(allsuccessors),
- inclusive=True)
- for s in allsuccessors:
- if s in ancs:
- obsoletenotrebased[allsuccessors[s]] = s
- elif (s == allsuccessors[s] and
- allsuccessors.values().count(s) == 1):
- # plain prune
- obsoletenotrebased[s] = None
+ nodemap = cl.nodemap
+ for srcrev in rebaseobsrevs:
+ srcnode = cl.node(srcrev)
+ destnode = cl.node(destmap[srcrev])
+ # XXX: more advanced APIs are required to handle split correctly
+ successors = list(obsutil.allsuccessors(repo.obsstore, [srcnode]))
+ if len(successors) == 1:
+ # obsutil.allsuccessors includes node itself. When the list only
+ # contains one element, it means there are no successors.
+ obsoletenotrebased[srcrev] = None
+ else:
+ for succnode in successors:
+ if succnode == srcnode or succnode not in nodemap:
+ continue
+ if cl.isancestor(succnode, destnode):
+ obsoletenotrebased[srcrev] = nodemap[succnode]
+ break
return obsoletenotrebased
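
The mapping built here separates plain prunes (no successor at all) from obsolete changesets whose successor is already an ancestor of their destination. That classification can be sketched without the obsstore API; the successor lists, ancestor test and revision numbers below are made up for illustration:

    def classify_obsolete(successors, isancestor, destmap):
        """successors: rev -> list of successor revs ([] for a plain prune).
        Return rev -> successor found in the destination, or None for prunes."""
        notrebased = {}
        for src, succs in successors.items():
            if not succs:
                notrebased[src] = None      # pruned: nothing replaces it
                continue
            for succ in succs:
                if isancestor(succ, destmap[src]):
                    notrebased[src] = succ  # already present below dest
                    break
        return notrebased

    # rev 5 was pruned; rev 6 was rewritten as rev 8, an ancestor of dest 9
    print(classify_obsolete({5: [], 6: [8]},
                            lambda a, b: (a, b) in {(8, 9)},
                            destmap={5: 9, 6: 9}))   # {5: None, 6: 8}
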
@@ -1534,5 +1667,3 @@
_("use 'hg rebase --continue' or 'hg rebase --abort'")])
cmdutil.afterresolvedstates.append(
['rebasestate', _('hg rebase --continue')])
- # ensure rebased rev are not hidden
- extensions.wrapfunction(repoview, 'pinnedrevs', _rebasedvisible)
--- a/hgext/releasenotes.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/releasenotes.py Sat Sep 30 07:52:48 2017 -0700
@@ -13,6 +13,7 @@
from __future__ import absolute_import
+import difflib
import errno
import re
import sys
@@ -46,6 +47,7 @@
]
RE_DIRECTIVE = re.compile('^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$')
+RE_ISSUE = r'\bissue ?[0-9]{4,6}(?![0-9])\b'
BULLET_SECTION = _('Other Changes')
@@ -92,6 +94,8 @@
This is used to combine multiple sources of release notes together.
"""
for section in other:
+ existingnotes = converttitled(self.titledforsection(section)) + \
+ convertnontitled(self.nontitledforsection(section))
for title, paragraphs in other.titledforsection(section):
if self.hastitledinsection(section, title):
# TODO prompt for resolution if different and running in
@@ -100,16 +104,32 @@
(title, section))
continue
- # TODO perform similarity comparison and try to match against
- # existing.
+ incoming_str = converttitled([(title, paragraphs)])[0]
+ if section == 'fix':
+ issue = getissuenum(incoming_str)
+ if issue:
+ if findissue(ui, existingnotes, issue):
+ continue
+
+ if similar(ui, existingnotes, incoming_str):
+ continue
+
self.addtitleditem(section, title, paragraphs)
for paragraphs in other.nontitledforsection(section):
if paragraphs in self.nontitledforsection(section):
continue
- # TODO perform similarily comparison and try to match against
- # existing.
+ incoming_str = convertnontitled([paragraphs])[0]
+ if section == 'fix':
+ issue = getissuenum(incoming_str)
+ if issue:
+ if findissue(ui, existingnotes, issue):
+ continue
+
+ if similar(ui, existingnotes, incoming_str):
+ continue
+
self.addnontitleditem(section, paragraphs)
class releasenotessections(object):
@@ -136,6 +156,77 @@
return None
+def converttitled(titledparagraphs):
+ """
+ Convert titled paragraphs to strings
+ """
+ string_list = []
+ for title, paragraphs in titledparagraphs:
+ lines = []
+ for para in paragraphs:
+ lines.extend(para)
+ string_list.append(' '.join(lines))
+ return string_list
+
+def convertnontitled(nontitledparagraphs):
+ """
+ Convert non-titled bullets to strings
+ """
+ string_list = []
+ for paragraphs in nontitledparagraphs:
+ lines = []
+ for para in paragraphs:
+ lines.extend(para)
+ string_list.append(' '.join(lines))
+ return string_list
+
+def getissuenum(incoming_str):
+ """
+ Returns issue number from the incoming string if it exists
+ """
+ issue = re.search(RE_ISSUE, incoming_str, re.IGNORECASE)
+ if issue:
+ issue = issue.group()
+ return issue
+
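
RE_ISSUE matches "issue" followed by an optional space and a 4-6 digit number, and getissuenum simply returns that match (or None). A quick stand-alone check of the pattern; the sample commit-message fragments are invented:

    import re

    RE_ISSUE = r'\bissue ?[0-9]{4,6}(?![0-9])\b'

    for text in ('fix crash on pull (issue5678)',
                 'fix crash on pull (Issue 5678)',
                 'see ticket 1234'):
        m = re.search(RE_ISSUE, text, re.IGNORECASE)
        print(m.group() if m else None)
    # issue5678
    # Issue 5678
    # None
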
+def findissue(ui, existing, issue):
+ """
+ Returns true if issue number already exists in notes.
+ """
+ if any(issue in s for s in existing):
+ ui.write(_('"%s" already exists in notes; ignoring\n') % issue)
+ return True
+ else:
+ return False
+
+def similar(ui, existing, incoming_str):
+ """
+ Returns true if similar note found in existing notes.
+ """
+ if len(incoming_str.split()) > 10:
+ merge = similaritycheck(incoming_str, existing)
+ if not merge:
+ ui.write(_('"%s" already exists in notes file; ignoring\n')
+ % incoming_str)
+ return True
+ else:
+ return False
+ else:
+ return False
+
+def similaritycheck(incoming_str, existingnotes):
+ """
+ Returns true when note fragment can be merged to existing notes.
+ """
+ import fuzzywuzzy.fuzz as fuzz
+ merge = True
+ for bullet in existingnotes:
+ score = fuzz.token_set_ratio(incoming_str, bullet)
+ if score > 75:
+ merge = False
+ break
+ return merge
+
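
similaritycheck depends on the third-party fuzzywuzzy package: token_set_ratio returns an integer score between 0 and 100, and the cutoff of 75 above decides whether an incoming note is treated as a duplicate of an existing bullet. A small stand-alone usage sketch, assuming fuzzywuzzy is installed (the note strings are invented):

    # requires: pip install fuzzywuzzy (python-Levenshtein is optional, for speed)
    from fuzzywuzzy import fuzz

    existing = 'fixed a crash when pulling from an empty repository'
    incoming = 'fix a crash while pulling from empty repositories'

    score = fuzz.token_set_ratio(incoming, existing)
    print(score)        # integer in [0, 100]; near-duplicates score well above 75
    print(score > 75)   # mirrors the cutoff used by similaritycheck()
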
def getcustomadmonitions(repo):
ctx = repo['.']
p = config.config()
@@ -152,6 +243,42 @@
read('.hgreleasenotes')
return p['sections']
+def checkadmonitions(ui, repo, directives, revs):
+ """
+ Checks the commit messages for admonitions and their validity.
+
+ .. abcd::
+
+ First paragraph under this admonition
+
+ For this commit message, using `hg releasenotes -r . --check`
+ returns: Invalid admonition 'abcd' present in changeset 3ea92981e103
+
+ This is because 'abcd' is present in neither the default nor the custom admonitions.
+ """
+ for rev in revs:
+ ctx = repo[rev]
+ admonition = re.search(RE_DIRECTIVE, ctx.description())
+ if admonition:
+ if admonition.group(1) in directives:
+ continue
+ else:
+ ui.write(_("Invalid admonition '%s' present in changeset %s"
+ "\n") % (admonition.group(1), ctx.hex()[:12]))
+ sim = lambda x: difflib.SequenceMatcher(None,
+ admonition.group(1), x).ratio()
+
+ similar = [s for s in directives if sim(s) > 0.6]
+ if len(similar) == 1:
+ ui.write(_("(did you mean %s?)\n") % similar[0])
+ elif similar:
+ ss = ", ".join(sorted(similar))
+ ui.write(_("(did you mean one of %s?)\n") % ss)
+
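
The "(did you mean ...)" hint relies on difflib.SequenceMatcher ratios with a 0.6 cutoff, which is easy to try on its own. A minimal sketch with invented admonition names:

    import difflib

    directives = ['fix', 'feature', 'perf', 'api']
    bad = 'faeture'     # misspelled admonition found in a commit message

    sim = lambda s: difflib.SequenceMatcher(None, bad, s).ratio()
    similar = [s for s in directives if sim(s) > 0.6]
    print(similar)      # ['feature']
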
+def _getadmonitionlist(ui, sections):
+ for section in sections:
+ ui.write("%s: %s\n" % (section[0], section[1]))
+
def parsenotesfromrevisions(repo, directives, revs):
notes = parsedreleasenotes()
@@ -336,15 +463,19 @@
lines.append('')
- if lines[-1]:
+ if lines and lines[-1]:
lines.append('')
return '\n'.join(lines)
@command('releasenotes',
- [('r', 'rev', '', _('revisions to process for release notes'), _('REV'))],
- _('[-r REV] FILE'))
-def releasenotes(ui, repo, file_, rev=None):
+ [('r', 'rev', '', _('revisions to process for release notes'), _('REV')),
+ ('c', 'check', False, _('checks for validity of admonitions (if any)'),
+ _('REV')),
+ ('l', 'list', False, _('list the available admonitions with their title'),
+ None)],
+ _('hg releasenotes [-r REV] [-c] FILE'))
+def releasenotes(ui, repo, file_=None, **opts):
"""parse release notes from commit messages into an output file
Given an output file and set of revisions, this command will parse commit
@@ -419,10 +550,30 @@
this command and changes should not be lost when running this command on
that file. A particular use case for this is to tweak the wording of a
release note after it has been added to the release notes file.
+
+ The -c/--check option checks the commit message for invalid admonitions.
+
+ The -l/--list option presents the user with a list of available
+ admonitions along with their titles. This also includes custom
+ admonitions (if any).
"""
sections = releasenotessections(ui, repo)
+ listflag = opts.get('list')
+
+ if listflag and opts.get('rev'):
+ raise error.Abort(_('cannot use both \'--list\' and \'--rev\''))
+ if listflag and opts.get('check'):
+ raise error.Abort(_('cannot use both \'--list\' and \'--check\''))
+
+ if listflag:
+ return _getadmonitionlist(ui, sections)
+
+ rev = opts.get('rev')
revs = scmutil.revrange(repo, [rev or 'not public()'])
+ if opts.get('check'):
+ return checkadmonitions(ui, repo, sections.names(), revs)
+
incoming = parsenotesfromrevisions(repo, sections.names(), revs)
try:
--- a/hgext/shelve.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/shelve.py Sat Sep 30 07:52:48 2017 -0700
@@ -33,6 +33,7 @@
bundlerepo,
changegroup,
cmdutil,
+ discovery,
error,
exchange,
hg,
@@ -145,8 +146,11 @@
btype = 'HG20'
compression = 'BZ'
- cg = changegroup.changegroupsubset(self.repo, bases, [node], 'shelve',
- version=cgversion)
+ outgoing = discovery.outgoing(self.repo, missingroots=bases,
+ missingheads=[node])
+ cg = changegroup.makechangegroup(self.repo, outgoing, cgversion,
+ 'shelve')
+
bundle2.writebundle(self.ui, cg, self.fname, btype, self.vfs,
compression=compression)
--- a/hgext/show.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/show.py Sat Sep 30 07:52:48 2017 -0700
@@ -161,9 +161,10 @@
ui.write(_('(no bookmarks set)\n'))
return
+ revs = [repo[node].rev() for node in marks.values()]
active = repo._activebookmark
longestname = max(len(b) for b in marks)
- # TODO consider exposing longest shortest(node).
+ nodelen = longestshortest(repo, revs)
for bm, node in sorted(marks.items()):
fm.startitem()
@@ -171,7 +172,8 @@
fm.write('bookmark', '%s', bm)
fm.write('node', fm.hexfunc(node), fm.hexfunc(node))
fm.data(active=bm == active,
- longestbookmarklen=longestname)
+ longestbookmarklen=longestname,
+ nodelen=nodelen)
@showview('stack', csettopic='stack')
def showstack(ui, repo, displayer):
@@ -235,6 +237,9 @@
else:
newheads = set()
+ allrevs = set(stackrevs) | newheads | set([baserev])
+ nodelen = longestshortest(repo, allrevs)
+
try:
cmdutil.findcmd('rebase', commands.table)
haverebase = True
@@ -246,7 +251,7 @@
# our simplicity and the customizations required.
# TODO use proper graph symbols from graphmod
- shortesttmpl = formatter.maketemplater(ui, '{shortest(node, 5)}')
+ shortesttmpl = formatter.maketemplater(ui, '{shortest(node, %d)}' % nodelen)
def shortest(ctx):
return shortesttmpl.render({'ctx': ctx, 'node': ctx.hex()})
@@ -277,7 +282,7 @@
ui.write(' ')
ui.write(('o '))
- displayer.show(ctx)
+ displayer.show(ctx, nodelen=nodelen)
displayer.flush(ctx)
ui.write('\n')
@@ -317,7 +322,7 @@
ui.write(' ')
ui.write(symbol, ' ')
- displayer.show(ctx)
+ displayer.show(ctx, nodelen=nodelen)
displayer.flush(ctx)
ui.write('\n')
@@ -334,7 +339,7 @@
ui.write(_('(stack base)'), '\n', label='stack.label')
ui.write(('o '))
- displayer.show(basectx)
+ displayer.show(basectx, nodelen=nodelen)
displayer.flush(basectx)
ui.write('\n')
@@ -393,11 +398,13 @@
"""changesets that aren't finished"""
# TODO support date-based limiting when calling revset.
revs = repo.revs('sort(_underway(), topo)')
+ nodelen = longestshortest(repo, revs)
revdag = graphmod.dagwalker(repo, revs)
ui.setconfig('experimental', 'graphshorten', True)
- cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
+ cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges,
+ props={'nodelen': nodelen})
def extsetup(ui):
# Alias `hg <prefix><view>` to `hg show <view>`.
@@ -418,6 +425,27 @@
ui.setconfig('alias', name, 'show %s' % view, source='show')
+def longestshortest(repo, revs, minlen=4):
+ """Return the length of the longest shortest node to identify revisions.
+
+ The result of this function can be used with the ``shortest()`` template
+ function to ensure that a value is unique and unambiguous for a given
+ set of nodes.
+
+ The number of revisions in the repo is taken into account to prevent
+ a numeric node prefix from conflicting with an integer revision number.
+ If we fail to do this, a value of e.g. ``10023`` could mean either
+ revision 10023 or node ``10023abc...``.
+ """
+ tmpl = formatter.maketemplater(repo.ui, '{shortest(node, %d)}' % minlen)
+ lens = [minlen]
+ for rev in revs:
+ ctx = repo[rev]
+ shortest = tmpl.render({'ctx': ctx, 'node': ctx.hex()})
+ lens.append(len(shortest))
+
+ return max(lens)
+
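
A simplified way to see what longestshortest is after: given a set of hex node ids, find the shortest prefix length that keeps every id unambiguous within the set. The sketch below is a stand-in for the real shortest() template function and ignores the integer-revision collision case mentioned in the docstring; the node ids are invented:

    def toy_longestshortest(hexnodes, minlen=4):
        """Length of the longest prefix needed so that every node in hexnodes
        is unambiguous within the set (simplified stand-in for shortest())."""
        need = minlen
        for node in hexnodes:
            others = [n for n in hexnodes if n != node]
            k = minlen
            while any(o.startswith(node[:k]) for o in others):
                k += 1
            need = max(need, k)
        return need

    nodes = ['deadbeefcafe', 'deadbe110000', 'abc123456789']
    print(toy_longestshortest(nodes))  # 7: 'deadbee' and 'deadbe1' first differ at index 6
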
# Adjust the docstring of the show command so it shows all registered views.
# This is a bit hacky because it runs at the end of module load. When moved
# into core or when another extension wants to provide a view, we'll need
--- a/hgext/sparse.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/sparse.py Sat Sep 30 07:52:48 2017 -0700
@@ -155,7 +155,8 @@
if include or exclude or enableprofile:
def clonesparse(orig, self, node, overwrite, *args, **kwargs):
sparse.updateconfig(self.unfiltered(), pat, {}, include=include,
- exclude=exclude, enableprofile=enableprofile)
+ exclude=exclude, enableprofile=enableprofile,
+ usereporootpaths=True)
return orig(self, node, overwrite, *args, **kwargs)
extensions.wrapfunction(hg, 'updaterepo', clonesparse)
return orig(ui, repo, *args, **opts)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/uncommit.py Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,187 @@
+# uncommit - undo the actions of a commit
+#
+# Copyright 2011 Peter Arrenbrecht <peter.arrenbrecht@gmail.com>
+# Logilab SA <contact@logilab.fr>
+# Pierre-Yves David <pierre-yves.david@ens-lyon.org>
+# Patrick Mezard <patrick@mezard.eu>
+# Copyright 2016 Facebook, Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+"""uncommit part or all of a local changeset (EXPERIMENTAL)
+
+This command undoes the effect of a local commit, returning the affected
+files to their uncommitted state. This means that files modified, added or
+removed in the changeset will be left unchanged, and so will remain modified,
+added and removed in the working directory.
+"""
+
+from __future__ import absolute_import
+
+from mercurial.i18n import _
+
+from mercurial import (
+ cmdutil,
+ commands,
+ context,
+ copies,
+ error,
+ node,
+ obsolete,
+ registrar,
+ scmutil,
+)
+
+cmdtable = {}
+command = registrar.command(cmdtable)
+
+# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
+# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
+# be specifying the version(s) of Mercurial they are tested with, or
+# leave the attribute unspecified.
+testedwith = 'ships-with-hg-core'
+
+def _commitfiltered(repo, ctx, match, allowempty):
+ """Recommit ctx with changed files not in match. Return the new
+ node identifier, or None if nothing changed.
+ """
+ base = ctx.p1()
+ # ctx
+ initialfiles = set(ctx.files())
+ exclude = set(f for f in initialfiles if match(f))
+
+ # No files matched commit, so nothing excluded
+ if not exclude:
+ return None
+
+ files = (initialfiles - exclude)
+ # return the p1 so that we don't create an obsmarker later
+ if not files and not allowempty:
+ return ctx.parents()[0].node()
+
+ # Filter copies
+ copied = copies.pathcopies(base, ctx)
+ copied = dict((dst, src) for dst, src in copied.iteritems()
+ if dst in files)
+ def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()):
+ if path not in contentctx:
+ return None
+ fctx = contentctx[path]
+ mctx = context.memfilectx(repo, fctx.path(), fctx.data(),
+ fctx.islink(),
+ fctx.isexec(),
+ copied=copied.get(path))
+ return mctx
+
+ new = context.memctx(repo,
+ parents=[base.node(), node.nullid],
+ text=ctx.description(),
+ files=files,
+ filectxfn=filectxfn,
+ user=ctx.user(),
+ date=ctx.date(),
+ extra=ctx.extra())
+ # phase handling
+ commitphase = ctx.phase()
+ overrides = {('phases', 'new-commit'): commitphase}
+ with repo.ui.configoverride(overrides, 'uncommit'):
+ newid = repo.commitctx(new)
+ return newid
+
+def _uncommitdirstate(repo, oldctx, match):
+ """Fix the dirstate after switching the working directory from
+ oldctx to a copy of oldctx not containing changed files matched by
+ match.
+ """
+ ctx = repo['.']
+ ds = repo.dirstate
+ copies = dict(ds.copies())
+ s = repo.status(oldctx.p1(), oldctx, match=match)
+ for f in s.modified:
+ if ds[f] == 'r':
+ # modified + removed -> removed
+ continue
+ ds.normallookup(f)
+
+ for f in s.added:
+ if ds[f] == 'r':
+ # added + removed -> unknown
+ ds.drop(f)
+ elif ds[f] != 'a':
+ ds.add(f)
+
+ for f in s.removed:
+ if ds[f] == 'a':
+ # removed + added -> normal
+ ds.normallookup(f)
+ elif ds[f] != 'r':
+ ds.remove(f)
+
+ # Merge old parent and old working dir copies
+ oldcopies = {}
+ for f in (s.modified + s.added):
+ src = oldctx[f].renamed()
+ if src:
+ oldcopies[f] = src[0]
+ oldcopies.update(copies)
+ copies = dict((dst, oldcopies.get(src, src))
+ for dst, src in oldcopies.iteritems())
+ # Adjust the dirstate copies
+ for dst, src in copies.iteritems():
+ if (src not in ctx or dst in ctx or ds[dst] != 'a'):
+ src = None
+ ds.copy(src, dst)
+
+@command('uncommit',
+ [('', 'keep', False, _('allow an empty commit after uncommiting')),
+ ] + commands.walkopts,
+ _('[OPTION]... [FILE]...'))
+def uncommit(ui, repo, *pats, **opts):
+ """uncommit part or all of a local changeset
+
+ This command undoes the effect of a local commit, returning the affected
+ files to their uncommitted state. This means that files modified or
+ deleted in the changeset will be left unchanged, and so will remain
+ modified in the working directory.
+ """
+
+ with repo.wlock(), repo.lock():
+ wctx = repo[None]
+
+ if not pats and not repo.ui.configbool('experimental',
+ 'uncommitondirtywdir', False):
+ cmdutil.bailifchanged(repo)
+ if wctx.parents()[0].node() == node.nullid:
+ raise error.Abort(_("cannot uncommit null changeset"))
+ if len(wctx.parents()) > 1:
+ raise error.Abort(_("cannot uncommit while merging"))
+ old = repo['.']
+ if not old.mutable():
+ raise error.Abort(_('cannot uncommit public changesets'))
+ if len(old.parents()) > 1:
+ raise error.Abort(_("cannot uncommit merge changeset"))
+ allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
+ if not allowunstable and old.children():
+ raise error.Abort(_('cannot uncommit changeset with children'))
+
+ with repo.transaction('uncommit'):
+ match = scmutil.match(old, pats, opts)
+ newid = _commitfiltered(repo, old, match, opts.get('keep'))
+ if newid is None:
+ ui.status(_("nothing to uncommit\n"))
+ return 1
+
+ mapping = {}
+ if newid != old.p1().node():
+ # Move local changes on filtered changeset
+ mapping[old.node()] = (newid,)
+ else:
+ # Fully removed the old commit
+ mapping[old.node()] = ()
+
+ scmutil.cleanupnodes(repo, mapping, 'uncommit')
+
+ with repo.dirstate.parentchange():
+ repo.dirstate.setparents(newid, node.nullid)
+ _uncommitdirstate(repo, old, match)
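
The core of _commitfiltered is plain set arithmetic: the files of the old changeset that match the given patterns are dropped from the recreated commit and thereby returned to the working directory. A tiny stand-alone sketch of that split; the file names and match predicate are invented:

    def split_uncommit(changedfiles, match):
        """Return (kept, uncommitted): files to recommit vs files removed from
        the commit, mirroring the set arithmetic in _commitfiltered."""
        exclude = {f for f in changedfiles if match(f)}
        kept = sorted(set(changedfiles) - exclude)
        return kept, sorted(exclude)

    changed = ['a.txt', 'src/b.py', 'src/c.py']
    match = lambda f: f.startswith('src/')     # e.g. "hg uncommit src"
    print(split_uncommit(changed, match))
    # (['a.txt'], ['src/b.py', 'src/c.py'])
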
--- a/hgext/win32mbcs.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/win32mbcs.py Sat Sep 30 07:52:48 2017 -0700
@@ -54,6 +54,7 @@
encoding,
error,
pycompat,
+ registrar,
)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
@@ -62,6 +63,15 @@
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'
+configtable = {}
+configitem = registrar.configitem(configtable)
+
+# encoding.encoding may be updated by the --encoding option.
+# Use a lambda to delay the resolution.
+configitem('win32mbcs', 'encoding',
+ default=lambda: encoding.encoding,
+)
+
_encoding = None # see extsetup
def decode(arg):
@@ -175,7 +185,7 @@
return
# determine encoding for filename
global _encoding
- _encoding = ui.config('win32mbcs', 'encoding', encoding.encoding)
+ _encoding = ui.config('win32mbcs', 'encoding')
# fake is only for relevant environment.
if _encoding.lower() in problematic_encodings.split():
for f in funcs.split():
--- a/hgext/win32text.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/win32text.py Sat Sep 30 07:52:48 2017 -0700
@@ -49,6 +49,7 @@
short,
)
from mercurial import (
+ registrar,
util,
)
@@ -58,6 +59,13 @@
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'
+configtable = {}
+configitem = registrar.configitem(configtable)
+
+configitem('win32text', 'warn',
+ default=True,
+)
+
# regexp for single LF without CR preceding.
re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
@@ -178,6 +186,6 @@
def extsetup(ui):
# deprecated config: win32text.warn
- if ui.configbool('win32text', 'warn', True):
+ if ui.configbool('win32text', 'warn'):
ui.warn(_("win32text is deprecated: "
"https://mercurial-scm.org/wiki/Win32TextExtension\n"))
--- a/hgext/zeroconf/__init__.py Mon Sep 18 10:54:00 2017 -0700
+++ b/hgext/zeroconf/__init__.py Sat Sep 30 07:52:48 2017 -0700
@@ -127,7 +127,9 @@
with app._obtainrepo() as repo:
name = app.reponame or os.path.basename(repo.root)
path = repo.ui.config("web", "prefix", "").strip('/')
- desc = repo.ui.config("web", "description", name)
+ desc = repo.ui.config("web", "description")
+ if not desc:
+ desc = name
publish(name, desc, path, port)
else:
# webdir
@@ -137,7 +139,9 @@
u.readconfig(os.path.join(path, '.hg', 'hgrc'))
name = os.path.basename(repo)
path = (prefix + repo).strip('/')
- desc = u.config('web', 'description', name)
+ desc = u.config('web', 'description')
+ if not desc:
+ desc = name
publish(name, desc, path, port)
return httpd
--- a/i18n/check-translation.py Mon Sep 18 10:54:00 2017 -0700
+++ b/i18n/check-translation.py Sat Sep 30 07:52:48 2017 -0700
@@ -1,9 +1,11 @@
#!/usr/bin/env python
#
# check-translation.py - check Mercurial specific translation problems
+from __future__ import absolute_import
+
+import re
import polib
-import re
scanners = []
checkers = []
@@ -51,7 +53,7 @@
... msgstr='prompt missing &sep$$missing amp$$followed by none&')
>>> match(promptchoice, pe)
True
- >>> for e in promptchoice(pe): print e
+ >>> for e in promptchoice(pe): print(e)
number of choices differs between msgid and msgstr
msgstr has invalid choice missing '&'
msgstr has invalid '&' followed by none
@@ -88,19 +90,19 @@
... msgstr= 'something (DEPRECATED)')
>>> match(deprecated, pe)
True
- >>> for e in deprecated(pe): print e
+ >>> for e in deprecated(pe): print(e)
>>> pe = polib.POEntry(
... msgid = 'Something (DEPRECATED)',
... msgstr= 'something (DETACERPED)')
>>> match(deprecated, pe)
True
- >>> for e in deprecated(pe): print e
+ >>> for e in deprecated(pe): print(e)
>>> pe = polib.POEntry(
... msgid = 'Something (DEPRECATED)',
... msgstr= 'something')
>>> match(deprecated, pe)
True
- >>> for e in deprecated(pe): print e
+ >>> for e in deprecated(pe): print(e)
msgstr inconsistently translated (DEPRECATED)
>>> pe = polib.POEntry(
... msgid = 'Something (DEPRECATED, foo bar)',
@@ -124,16 +126,16 @@
>>> pe = polib.POEntry(
... msgid ='ends with ::',
... msgstr='ends with ::')
- >>> for e in taildoublecolons(pe): print e
+ >>> for e in taildoublecolons(pe): print(e)
>>> pe = polib.POEntry(
... msgid ='ends with ::',
... msgstr='ends without double-colons')
- >>> for e in taildoublecolons(pe): print e
+ >>> for e in taildoublecolons(pe): print(e)
tail '::'-ness differs between msgid and msgstr
>>> pe = polib.POEntry(
... msgid ='ends without double-colons',
... msgstr='ends with ::')
- >>> for e in taildoublecolons(pe): print e
+ >>> for e in taildoublecolons(pe): print(e)
tail '::'-ness differs between msgid and msgstr
"""
if pe.msgid.endswith('::') != pe.msgstr.endswith('::'):
@@ -149,7 +151,7 @@
>>> pe = polib.POEntry(
... msgid =' indented text',
... msgstr=' narrowed indentation')
- >>> for e in indentation(pe): print e
+ >>> for e in indentation(pe): print(e)
initial indentation width differs betweeen msgid and msgstr
"""
idindent = len(pe.msgid) - len(pe.msgid.lstrip())
--- a/i18n/de.po Mon Sep 18 10:54:00 2017 -0700
+++ b/i18n/de.po Sat Sep 30 07:52:48 2017 -0700
@@ -9746,8 +9746,8 @@
#. i18n: column positioning for "hg log"
#, python-format
-msgid "changeset: %d:%s\n"
-msgstr "Änderung: %d:%s\n"
+msgid "changeset: %s\n"
+msgstr "Änderung: %s\n"
#. i18n: column positioning for "hg log"
#, python-format
@@ -9771,8 +9771,8 @@
#. i18n: column positioning for "hg log"
#, python-format
-msgid "parent: %d:%s\n"
-msgstr "Vorgänger: %d:%s\n"
+msgid "parent: %s\n"
+msgstr "Vorgänger: %s\n"
#. i18n: column positioning for "hg log"
#, python-format
--- a/i18n/hggettext Mon Sep 18 10:54:00 2017 -0700
+++ b/i18n/hggettext Sat Sep 30 07:52:48 2017 -0700
@@ -24,6 +24,7 @@
import inspect
import os
+import re
import sys
@@ -60,9 +61,15 @@
'msgid %s\n' % normalize(s) +
'msgstr ""\n')
+doctestre = re.compile(r'^ +>>> ', re.MULTILINE)
def offset(src, doc, name, default):
"""Compute offset or issue a warning on stdout."""
+ # remove doctest part, in order to avoid backslash mismatching
+ m = doctestre.search(doc)
+ if m:
+ doc = doc[:m.start()]
+
# Backslashes in doc appear doubled in src.
end = src.find(doc.replace('\\', '\\\\'))
if end == -1:
@@ -96,7 +103,7 @@
only extract docstrings from functions mentioned in these tables.
"""
mod = importpath(path)
- if mod.__doc__:
+ if not path.startswith('mercurial/') and mod.__doc__:
src = open(path).read()
lineno = 1 + offset(src, mod.__doc__, path, 7)
print(poentry(path, lineno, mod.__doc__))
@@ -112,6 +119,8 @@
for func, rstrip in functions:
if func.__doc__:
+ docobj = func # this might be a proxy to provide formatted doc
+ func = getattr(func, '_origfunc', func)
funcmod = inspect.getmodule(func)
extra = ''
if funcmod.__package__ == funcmod.__name__:
@@ -121,10 +130,15 @@
src = inspect.getsource(func)
name = "%s.%s" % (actualpath, func.__name__)
lineno = inspect.getsourcelines(func)[1]
- doc = func.__doc__
+ doc = docobj.__doc__
+ origdoc = getattr(docobj, '_origdoc', '')
if rstrip:
doc = doc.rstrip()
- lineno += offset(src, doc, name, 1)
+ origdoc = origdoc.rstrip()
+ if origdoc:
+ lineno += offset(src, origdoc, name, 1)
+ else:
+ lineno += offset(src, doc, name, 1)
print(poentry(actualpath, lineno, doc))
--- a/mercurial/branchmap.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/branchmap.py Sat Sep 30 07:52:48 2017 -0700
@@ -211,10 +211,13 @@
Raise KeyError for unknown branch.'''
return self._branchtip(self[branch])[0]
+ def iteropen(self, nodes):
+ return (n for n in nodes if n not in self._closednodes)
+
def branchheads(self, branch, closed=False):
heads = self[branch]
if not closed:
- heads = [h for h in heads if h not in self._closednodes]
+ heads = list(self.iteropen(heads))
return heads
def iterbranches(self):
--- a/mercurial/bundle2.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/bundle2.py Sat Sep 30 07:52:48 2017 -0700
@@ -145,7 +145,7 @@
preserve.
"""
-from __future__ import absolute_import
+from __future__ import absolute_import, division
import errno
import re
@@ -179,8 +179,6 @@
_fpayloadsize = '>i'
_fpartparamcount = '>BB'
-_fphasesentry = '>i20s'
-
preferedchunksize = 4096
_parttypeforbidden = re.compile('[^a-zA-Z0-9_:-]')
@@ -296,9 +294,31 @@
self.repo = repo
self.ui = repo.ui
self.records = unbundlerecords()
- self.gettransaction = transactiongetter
self.reply = None
self.captureoutput = captureoutput
+ self.hookargs = {}
+ self._gettransaction = transactiongetter
+
+ def gettransaction(self):
+ transaction = self._gettransaction()
+
+ if self.hookargs:
+ # the ones added to the transaction supersede those added
+ # to the operation.
+ self.hookargs.update(transaction.hookargs)
+ transaction.hookargs = self.hookargs
+
+ # mark the hookargs as flushed. further attempts to add to
+ # hookargs will result in an abort.
+ self.hookargs = None
+
+ return transaction
+
+ def addhookargs(self, hookargs):
+ if self.hookargs is None:
+ raise error.ProgrammingError('attempted to add hookargs to '
+ 'operation after transaction started')
+ self.hookargs.update(hookargs)
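
The new gettransaction/addhookargs pair implements a one-shot hand-off: hook arguments may be accumulated on the operation until the first transaction is obtained, at which point they are merged into the transaction (transaction values winning) and further additions become an error. A toy sketch of that protocol; the class names and the RuntimeError stand in for the real transaction object and ProgrammingError:

    class toytransaction(object):
        def __init__(self):
            self.hookargs = {'txnid': 'TXN1'}

    class toyoperation(object):
        """Mimics bundleoperation's one-shot hookargs hand-off."""
        def __init__(self, maketransaction):
            self.hookargs = {}
            self._maketransaction = maketransaction

        def addhookargs(self, hookargs):
            if self.hookargs is None:
                raise RuntimeError('transaction already started')
            self.hookargs.update(hookargs)

        def gettransaction(self):
            tr = self._maketransaction()
            if self.hookargs is not None:
                self.hookargs.update(tr.hookargs)  # transaction values win
                tr.hookargs = self.hookargs
                self.hookargs = None               # later addhookargs() abort
            return tr

    op = toyoperation(toytransaction)
    op.addhookargs({'source': 'push'})
    tr = op.gettransaction()
    print(tr.hookargs)                 # {'source': 'push', 'txnid': 'TXN1'}
    try:
        op.addhookargs({'late': '1'})
    except RuntimeError as exc:
        print(exc)                     # transaction already started
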
class TransactionUnavailable(RuntimeError):
pass
@@ -325,6 +345,74 @@
_processchangegroup(op, unbundler, tr, source, url, **kwargs)
return op
+class partiterator(object):
+ def __init__(self, repo, op, unbundler):
+ self.repo = repo
+ self.op = op
+ self.unbundler = unbundler
+ self.iterator = None
+ self.count = 0
+ self.current = None
+
+ def __enter__(self):
+ def func():
+ itr = enumerate(self.unbundler.iterparts())
+ for count, p in itr:
+ self.count = count
+ self.current = p
+ yield p
+ p.seek(0, 2)
+ self.current = None
+ self.iterator = func()
+ return self.iterator
+
+ def __exit__(self, type, exc, tb):
+ if not self.iterator:
+ return
+
+ if exc:
+ # If exiting or interrupted, do not attempt to seek the stream
+ # past the current part. This makes abort faster.
+ if (self.current and
+ not isinstance(exc, (SystemExit, KeyboardInterrupt))):
+ # consume the part content to not corrupt the stream.
+ self.current.seek(0, 2)
+
+ # Any exceptions seeking to the end of the bundle at this point are
+ # almost certainly related to the underlying stream being bad.
+ # And, chances are that the exception we're handling is related to
+ # getting in that bad state. So, we swallow the seeking error and
+ # re-raise the original error.
+ seekerror = False
+ try:
+ for part in self.iterator:
+ # consume the bundle content
+ part.seek(0, 2)
+ except Exception:
+ seekerror = True
+
+ # Small hack to let caller code distinguish exceptions from bundle2
+ # processing from processing the old format. This is mostly needed
+ # to handle different return codes to unbundle according to the type
+ # of bundle. We should probably clean up or drop this return code
+ # craziness in a future version.
+ exc.duringunbundle2 = True
+ salvaged = []
+ replycaps = None
+ if self.op.reply is not None:
+ salvaged = self.op.reply.salvageoutput()
+ replycaps = self.op.reply.capabilities
+ exc._replycaps = replycaps
+ exc._bundle2salvagedoutput = salvaged
+
+ # Re-raising from a variable loses the original stack. So only use
+ # that form if we need to.
+ if seekerror:
+ raise exc
+
+ self.repo.ui.debug('bundle2-input-bundle: %i parts total\n' %
+ self.count)
+
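
partiterator packages the "drain the remaining parts and annotate the exception" behaviour into a context manager so processbundle can stay a simple for-loop. The same shape, stripped of bundle2 specifics, looks like the invented sketch below (drain stands in for part.seek(0, 2), and the duringiteration attribute for duringunbundle2):

    class drainingiterator(object):
        """Iterate over items; if the loop body raises, consume what is left so
        the underlying stream stays consistent, then re-raise the error."""
        def __init__(self, items):
            self.items = iter(items)
            self.iterator = None

        def __enter__(self):
            def gen():
                for item in self.items:
                    yield item
            self.iterator = gen()
            return self.iterator

        def __exit__(self, exctype, exc, tb):
            if exc is None or self.iterator is None:
                return False
            for item in self.iterator:    # drain, like part.seek(0, 2)
                pass
            exc.duringiteration = True    # tag it, like exc.duringunbundle2
            return False                  # propagate the original error

    try:
        with drainingiterator(['p1', 'p2', 'p3']) as parts:
            for p in parts:
                if p == 'p2':
                    raise RuntimeError('bad part')
    except RuntimeError as exc:
        print(getattr(exc, 'duringiteration', False))   # True
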
def processbundle(repo, unbundler, transactiongetter=None, op=None):
"""This function process a bundle, apply effect to/from a repo
@@ -350,57 +438,22 @@
msg = ['bundle2-input-bundle:']
if unbundler.params:
msg.append(' %i params' % len(unbundler.params))
- if op.gettransaction is None or op.gettransaction is _notransaction:
+ if op._gettransaction is None or op._gettransaction is _notransaction:
msg.append(' no-transaction')
else:
msg.append(' with-transaction')
msg.append('\n')
repo.ui.debug(''.join(msg))
- iterparts = enumerate(unbundler.iterparts())
- part = None
- nbpart = 0
- try:
- for nbpart, part in iterparts:
- _processpart(op, part)
- except Exception as exc:
- # Any exceptions seeking to the end of the bundle at this point are
- # almost certainly related to the underlying stream being bad.
- # And, chances are that the exception we're handling is related to
- # getting in that bad state. So, we swallow the seeking error and
- # re-raise the original error.
- seekerror = False
- try:
- for nbpart, part in iterparts:
- # consume the bundle content
- part.seek(0, 2)
- except Exception:
- seekerror = True
- # Small hack to let caller code distinguish exceptions from bundle2
- # processing from processing the old format. This is mostly
- # needed to handle different return codes to unbundle according to the
- # type of bundle. We should probably clean up or drop this return code
- # craziness in a future version.
- exc.duringunbundle2 = True
- salvaged = []
- replycaps = None
- if op.reply is not None:
- salvaged = op.reply.salvageoutput()
- replycaps = op.reply.capabilities
- exc._replycaps = replycaps
- exc._bundle2salvagedoutput = salvaged
-
- # Re-raising from a variable loses the original stack. So only use
- # that form if we need to.
- if seekerror:
- raise exc
- else:
- raise
- finally:
- repo.ui.debug('bundle2-input-bundle: %i parts total\n' % nbpart)
+ processparts(repo, op, unbundler)
return op
+def processparts(repo, op, unbundler):
+ with partiterator(repo, op, unbundler) as parts:
+ for part in parts:
+ _processpart(op, part)
+
def _processchangegroup(op, cg, tr, source, url, **kwargs):
ret = cg.apply(op.repo, tr, source, url, **kwargs)
op.records.add('changegroup', {
@@ -408,77 +461,73 @@
})
return ret
+def _gethandler(op, part):
+ status = 'unknown' # used by debug output
+ try:
+ handler = parthandlermapping.get(part.type)
+ if handler is None:
+ status = 'unsupported-type'
+ raise error.BundleUnknownFeatureError(parttype=part.type)
+ indebug(op.ui, 'found a handler for part %s' % part.type)
+ unknownparams = part.mandatorykeys - handler.params
+ if unknownparams:
+ unknownparams = list(unknownparams)
+ unknownparams.sort()
+ status = 'unsupported-params (%s)' % ', '.join(unknownparams)
+ raise error.BundleUnknownFeatureError(parttype=part.type,
+ params=unknownparams)
+ status = 'supported'
+ except error.BundleUnknownFeatureError as exc:
+ if part.mandatory: # mandatory parts
+ raise
+ indebug(op.ui, 'ignoring unsupported advisory part %s' % exc)
+ return # skip to part processing
+ finally:
+ if op.ui.debugflag:
+ msg = ['bundle2-input-part: "%s"' % part.type]
+ if not part.mandatory:
+ msg.append(' (advisory)')
+ nbmp = len(part.mandatorykeys)
+ nbap = len(part.params) - nbmp
+ if nbmp or nbap:
+ msg.append(' (params:')
+ if nbmp:
+ msg.append(' %i mandatory' % nbmp)
+ if nbap:
+ msg.append(' %i advisory' % nbap)
+ msg.append(')')
+ msg.append(' %s\n' % status)
+ op.ui.debug(''.join(msg))
+
+ return handler
+
def _processpart(op, part):
"""process a single part from a bundle
The part is guaranteed to have been fully consumed when the function exits
(even if an exception is raised)."""
- status = 'unknown' # used by debug output
- hardabort = False
+ handler = _gethandler(op, part)
+ if handler is None:
+ return
+
+ # handler is called outside the above try block so that we don't
+ # risk catching KeyErrors from anything other than the
+ # parthandlermapping lookup (any KeyError raised by handler()
+ # itself represents a defect of a different variety).
+ output = None
+ if op.captureoutput and op.reply is not None:
+ op.ui.pushbuffer(error=True, subproc=True)
+ output = ''
try:
- try:
- handler = parthandlermapping.get(part.type)
- if handler is None:
- status = 'unsupported-type'
- raise error.BundleUnknownFeatureError(parttype=part.type)
- indebug(op.ui, 'found a handler for part %r' % part.type)
- unknownparams = part.mandatorykeys - handler.params
- if unknownparams:
- unknownparams = list(unknownparams)
- unknownparams.sort()
- status = 'unsupported-params (%s)' % unknownparams
- raise error.BundleUnknownFeatureError(parttype=part.type,
- params=unknownparams)
- status = 'supported'
- except error.BundleUnknownFeatureError as exc:
- if part.mandatory: # mandatory parts
- raise
- indebug(op.ui, 'ignoring unsupported advisory part %s' % exc)
- return # skip to part processing
- finally:
- if op.ui.debugflag:
- msg = ['bundle2-input-part: "%s"' % part.type]
- if not part.mandatory:
- msg.append(' (advisory)')
- nbmp = len(part.mandatorykeys)
- nbap = len(part.params) - nbmp
- if nbmp or nbap:
- msg.append(' (params:')
- if nbmp:
- msg.append(' %i mandatory' % nbmp)
- if nbap:
- msg.append(' %i advisory' % nbmp)
- msg.append(')')
- msg.append(' %s\n' % status)
- op.ui.debug(''.join(msg))
-
- # handler is called outside the above try block so that we don't
- # risk catching KeyErrors from anything other than the
- # parthandlermapping lookup (any KeyError raised by handler()
- # itself represents a defect of a different variety).
- output = None
- if op.captureoutput and op.reply is not None:
- op.ui.pushbuffer(error=True, subproc=True)
- output = ''
- try:
- handler(op, part)
- finally:
- if output is not None:
- output = op.ui.popbuffer()
- if output:
- outpart = op.reply.newpart('output', data=output,
- mandatory=False)
- outpart.addparam('in-reply-to', str(part.id), mandatory=False)
- # If exiting or interrupted, do not attempt to seek the stream in the
- # finally block below. This makes abort faster.
- except (SystemExit, KeyboardInterrupt):
- hardabort = True
- raise
+ handler(op, part)
finally:
- # consume the part content to not corrupt the stream.
- if not hardabort:
- part.seek(0, 2)
-
+ if output is not None:
+ output = op.ui.popbuffer()
+ if output:
+ outpart = op.reply.newpart('output', data=output,
+ mandatory=False)
+ outpart.addparam(
+ 'in-reply-to', pycompat.bytestr(part.id), mandatory=False)
def decodecaps(blob):
"""decode a bundle2 caps bytes blob into a dictionary
@@ -563,9 +612,9 @@
def addparam(self, name, value=None):
"""add a stream level parameter"""
if not name:
- raise ValueError('empty parameter name')
- if name[0] not in string.letters:
- raise ValueError('non letter first character: %r' % name)
+ raise ValueError(r'empty parameter name')
+ if name[0:1] not in pycompat.bytestr(string.ascii_letters):
+ raise ValueError(r'non letter first character: %s' % name)
self._params.append((name, value))
def addpart(self, part):
@@ -741,14 +790,14 @@
ignored or failing.
"""
if not name:
- raise ValueError('empty parameter name')
- if name[0] not in string.letters:
- raise ValueError('non letter first character: %r' % name)
+ raise ValueError(r'empty parameter name')
+ if name[0:1] not in pycompat.bytestr(string.ascii_letters):
+ raise ValueError(r'non letter first character: %s' % name)
try:
handler = b2streamparamsmap[name.lower()]
except KeyError:
- if name[0].islower():
- indebug(self.ui, "ignoring unknown parameter %r" % name)
+ if name[0:1].islower():
+ indebug(self.ui, "ignoring unknown parameter %s" % name)
else:
raise error.BundleUnknownFeatureError(params=(name,))
else:
@@ -805,7 +854,11 @@
while headerblock is not None:
part = unbundlepart(self.ui, headerblock, self._fp)
yield part
+ # Seek to the end of the part to force its consumption so the next
+ # part can be read. But then seek back to the beginning so the
+ # code consuming this generator has a part that starts at 0.
part.seek(0, 2)
+ part.seek(0)
headerblock = self._readpartheader()
indebug(self.ui, 'end of bundle2 stream')
@@ -964,7 +1017,8 @@
msg.append(')')
if not self.data:
msg.append(' empty payload')
- elif util.safehasattr(self.data, 'next'):
+ elif (util.safehasattr(self.data, 'next')
+ or util.safehasattr(self.data, '__next__')):
msg.append(' streamed payload')
else:
msg.append(' %i bytes payload' % len(self.data))
@@ -976,7 +1030,7 @@
parttype = self.type.upper()
else:
parttype = self.type.lower()
- outdebug(ui, 'part %s: "%s"' % (self.id, parttype))
+ outdebug(ui, 'part %s: "%s"' % (pycompat.bytestr(self.id), parttype))
## parttype
header = [_pack(_fparttypesize, len(parttype)),
parttype, _pack(_fpartid, self.id),
@@ -994,7 +1048,7 @@
for key, value in advpar:
parsizes.append(len(key))
parsizes.append(len(value))
- paramsizes = _pack(_makefpartparamsizes(len(parsizes) / 2), *parsizes)
+ paramsizes = _pack(_makefpartparamsizes(len(parsizes) // 2), *parsizes)
header.append(paramsizes)
# key, value
for key, value in manpar:
@@ -1004,7 +1058,11 @@
header.append(key)
header.append(value)
## finalize header
- headerchunk = ''.join(header)
+ try:
+ headerchunk = ''.join(header)
+ except TypeError:
+ raise TypeError(r'Found a non-bytes trying to '
+ r'build bundle part header: %r' % header)
outdebug(ui, 'header chunk size: %i' % len(headerchunk))
yield _pack(_fpartheadersize, len(headerchunk))
yield headerchunk
@@ -1021,11 +1079,12 @@
ui.debug('bundle2-generatorexit\n')
raise
except BaseException as exc:
+ bexc = util.forcebytestr(exc)
# backup exception data for later
ui.debug('bundle2-input-stream-interrupt: encoding exception %s'
- % exc)
+ % bexc)
tb = sys.exc_info()[2]
- msg = 'unexpected error: %s' % exc
+ msg = 'unexpected error: %s' % bexc
interpart = bundlepart('error:abort', [('message', msg)],
mandatory=False)
interpart.id = 0
@@ -1047,7 +1106,8 @@
Exists to handle the different methods to provide data to a part."""
# we only support fixed size data now.
# This will be improved in the future.
- if util.safehasattr(self.data, 'next'):
+ if (util.safehasattr(self.data, 'next')
+ or util.safehasattr(self.data, '__next__')):
buff = util.chunkbuffer(self.data)
chunk = buff.read(preferedchunksize)
while chunk:
@@ -1095,7 +1155,15 @@
return
part = unbundlepart(self.ui, headerblock, self._fp)
op = interruptoperation(self.ui)
- _processpart(op, part)
+ hardabort = False
+ try:
+ _processpart(op, part)
+ except (SystemExit, KeyboardInterrupt):
+ hardabort = True
+ raise
+ finally:
+ if not hardabort:
+ part.seek(0, 2)
self.ui.debug('bundle2-input-stream-interrupt:'
' closing out of band context\n')
@@ -1213,7 +1281,7 @@
self.type = self._fromheader(typesize)
indebug(self.ui, 'part type: "%s"' % self.type)
self.id = self._unpackheader(_fpartid)[0]
- indebug(self.ui, 'part id: "%s"' % self.id)
+ indebug(self.ui, 'part id: "%s"' % pycompat.bytestr(self.id))
# extract mandatory bit from type
self.mandatory = (self.type != self.type.lower())
self.type = self.type.lower()
@@ -1225,7 +1293,7 @@
fparamsizes = _makefpartparamsizes(mancount + advcount)
paramsizes = self._unpackheader(fparamsizes)
# make it a list of couple again
- paramsizes = zip(paramsizes[::2], paramsizes[1::2])
+ paramsizes = list(zip(paramsizes[::2], paramsizes[1::2]))
# split mandatory from advisory
mansizes = paramsizes[:mancount]
advsizes = paramsizes[mancount:]
@@ -1327,6 +1395,7 @@
'digests': tuple(sorted(util.DIGESTS.keys())),
'remote-changegroup': ('http', 'https'),
'hgtagsfnodes': (),
+ 'phases': ('heads',),
}
def getrepocaps(repo, allowpushback=False):
@@ -1345,6 +1414,8 @@
cpmode = repo.ui.config('server', 'concurrent-push-mode')
if cpmode == 'check-related':
caps['checkheads'] = ('related',)
+ if 'phases' in repo.ui.configlist('devel', 'legacy.exchange'):
+ caps.pop('phases')
return caps
def bundle2caps(remote):
@@ -1364,7 +1435,7 @@
def writenewbundle(ui, repo, source, filename, bundletype, outgoing, opts,
vfs=None, compression=None, compopts=None):
if bundletype.startswith('HG10'):
- cg = changegroup.getchangegroup(repo, source, outgoing, version='01')
+ cg = changegroup.makechangegroup(repo, outgoing, '01', source)
return writebundle(ui, cg, filename, bundletype, vfs=vfs,
compression=compression, compopts=compopts)
elif not bundletype.startswith('HG20'):
@@ -1392,12 +1463,11 @@
cgversion = opts.get('cg.version')
if cgversion is None:
cgversion = changegroup.safeversion(repo)
- cg = changegroup.getchangegroup(repo, source, outgoing,
- version=cgversion)
+ cg = changegroup.makechangegroup(repo, outgoing, cgversion, source)
part = bundler.newpart('changegroup', data=cg.getchunks())
part.addparam('version', cg.version)
if 'clcount' in cg.extras:
- part.addparam('nbchanges', str(cg.extras['clcount']),
+ part.addparam('nbchanges', '%d' % cg.extras['clcount'],
mandatory=False)
if opts.get('phases') and repo.revs('%ln and secret()',
outgoing.missingheads):
@@ -1411,11 +1481,8 @@
if opts.get('phases', False):
headsbyphase = phases.subsetphaseheads(repo, outgoing.missing)
- phasedata = []
- for phase in phases.allphases:
- for head in headsbyphase[phase]:
- phasedata.append(_pack(_fphasesentry, phase, head))
- bundler.newpart('phase-heads', data=''.join(phasedata))
+ phasedata = phases.binaryencode(headsbyphase)
+ bundler.newpart('phase-heads', data=phasedata)
def addparttagsfnodescache(repo, bundler, outgoing):
# we include the tags fnode cache for the bundle changeset
@@ -1473,7 +1540,7 @@
part = bundle.newpart('changegroup', data=cg.getchunks())
part.addparam('version', cg.version)
if 'clcount' in cg.extras:
- part.addparam('nbchanges', str(cg.extras['clcount']),
+ part.addparam('nbchanges', '%d' % cg.extras['clcount'],
mandatory=False)
chunkiter = bundle.getchunks()
else:
@@ -1554,7 +1621,8 @@
# This is definitely not the final form of this
# return. But one need to start somewhere.
part = op.reply.newpart('reply:changegroup', mandatory=False)
- part.addparam('in-reply-to', str(inpart.id), mandatory=False)
+ part.addparam(
+ 'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False)
part.addparam('return', '%i' % ret, mandatory=False)
assert not inpart.read()
@@ -1617,7 +1685,8 @@
# This is definitely not the final form of this
# return. But one need to start somewhere.
part = op.reply.newpart('reply:changegroup')
- part.addparam('in-reply-to', str(inpart.id), mandatory=False)
+ part.addparam(
+ 'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False)
part.addparam('return', '%i' % ret, mandatory=False)
try:
real_part.validate()
@@ -1760,7 +1829,8 @@
op.records.add('pushkey', record)
if op.reply is not None:
rpart = op.reply.newpart('reply:pushkey')
- rpart.addparam('in-reply-to', str(inpart.id), mandatory=False)
+ rpart.addparam(
+ 'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False)
rpart.addparam('return', '%i' % ret, mandatory=False)
if inpart.mandatory and not ret:
kwargs = {}
@@ -1769,24 +1839,11 @@
kwargs[key] = inpart.params[key]
raise error.PushkeyFailed(partid=str(inpart.id), **kwargs)
-def _readphaseheads(inpart):
- headsbyphase = [[] for i in phases.allphases]
- entrysize = struct.calcsize(_fphasesentry)
- while True:
- entry = inpart.read(entrysize)
- if len(entry) < entrysize:
- if entry:
- raise error.Abort(_('bad phase-heads bundle part'))
- break
- phase, node = struct.unpack(_fphasesentry, entry)
- headsbyphase[phase].append(node)
- return headsbyphase
-
@parthandler('phase-heads')
def handlephases(op, inpart):
"""apply phases from bundle part to repo"""
- headsbyphase = _readphaseheads(inpart)
- phases.updatephases(op.repo.unfiltered(), op.gettransaction(), headsbyphase)
+ headsbyphase = phases.binarydecode(inpart)
+ phases.updatephases(op.repo.unfiltered(), op.gettransaction, headsbyphase)
@parthandler('reply:pushkey', ('return', 'in-reply-to'))
def handlepushkeyreply(op, inpart):
@@ -1815,7 +1872,8 @@
op.records.add('obsmarkers', {'new': new})
if op.reply is not None:
rpart = op.reply.newpart('reply:obsmarkers')
- rpart.addparam('in-reply-to', str(inpart.id), mandatory=False)
+ rpart.addparam(
+ 'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False)
rpart.addparam('new', '%i' % new, mandatory=False)
@@ -1849,3 +1907,17 @@
cache.write()
op.ui.debug('applied %i hgtags fnodes cache entries\n' % count)
+
+@parthandler('pushvars')
+def bundle2getvars(op, part):
+ '''unbundle a bundle2 containing shellvars on the server'''
+ # An option to disable unbundling on server-side for security reasons
+ if op.ui.configbool('push', 'pushvars.server'):
+ hookargs = {}
+ for key, value in part.advisoryparams:
+ key = key.upper()
+ # We want pushed variables to have USERVAR_ prepended so we know
+ # they came from the --pushvar flag.
+ key = "USERVAR_" + key
+ hookargs[key] = value
+ op.addhookargs(hookargs)
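The new 'pushvars' handler is small but worth spelling out: advisory part parameters are uppercased and prefixed with USERVAR_ before they are exposed as hook arguments. A minimal standalone sketch of that transformation in plain Python (not Mercurial's bundle2 API; the input is assumed to be a simple list of (key, value) pairs):

    def pushvars_to_hookargs(advisoryparams):
        # Uppercase and prefix each key so hooks can tell these values
        # came from the client's --pushvars flag.
        hookargs = {}
        for key, value in advisoryparams:
            hookargs['USERVAR_' + key.upper()] = value
        return hookargs

    print(pushvars_to_hookargs([('debug', '1'), ('reviewer', 'alice')]))
    # {'USERVAR_DEBUG': '1', 'USERVAR_REVIEWER': 'alice'}
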
--- a/mercurial/bundlerepo.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/bundlerepo.py Sat Sep 30 07:52:48 2017 -0700
@@ -55,17 +55,9 @@
self.bundle = bundle
n = len(self)
self.repotiprev = n - 1
- chain = None
self.bundlerevs = set() # used by 'bundle()' revset expression
- getchunk = lambda: bundle.deltachunk(chain)
- for chunkdata in iter(getchunk, {}):
- node = chunkdata['node']
- p1 = chunkdata['p1']
- p2 = chunkdata['p2']
- cs = chunkdata['cs']
- deltabase = chunkdata['deltabase']
- delta = chunkdata['delta']
- flags = chunkdata['flags']
+ for deltadata in bundle.deltaiter():
+ node, p1, p2, cs, deltabase, delta, flags = deltadata
size = len(delta)
start = bundle.tell() - size
@@ -73,7 +65,6 @@
link = linkmapper(cs)
if node in self.nodemap:
# this can happen if two branches make the same change
- chain = node
self.bundlerevs.add(self.nodemap[node])
continue
@@ -93,7 +84,6 @@
self.index.insert(-1, e)
self.nodemap[node] = n
self.bundlerevs.add(n)
- chain = node
n += 1
def _chunk(self, rev):
@@ -164,7 +154,7 @@
def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
raise NotImplementedError
- def addgroup(self, revs, linkmapper, transaction):
+ def addgroup(self, deltas, transaction, addrevisioncb=None):
raise NotImplementedError
def strip(self, rev, minlink):
raise NotImplementedError
@@ -264,24 +254,6 @@
class bundlerepository(localrepo.localrepository):
def __init__(self, ui, path, bundlename):
- def _writetempbundle(read, suffix, header=''):
- """Write a temporary file to disk
-
- This is closure because we need to make sure this tracked by
- self.tempfile for cleanup purposes."""
- fdtemp, temp = self.vfs.mkstemp(prefix="hg-bundle-",
- suffix=".hg10un")
- self.tempfile = temp
-
- with os.fdopen(fdtemp, pycompat.sysstr('wb')) as fptemp:
- fptemp.write(header)
- while True:
- chunk = read(2**18)
- if not chunk:
- break
- fptemp.write(chunk)
-
- return self.vfs.open(self.tempfile, mode="rb")
self._tempparent = None
try:
localrepo.localrepository.__init__(self, ui, path)
@@ -301,30 +273,22 @@
self.bundlefile = self.bundle = exchange.readbundle(ui, f, bundlename)
if isinstance(self.bundle, bundle2.unbundle20):
- cgstream = None
+ hadchangegroup = False
for part in self.bundle.iterparts():
if part.type == 'changegroup':
- if cgstream is not None:
+ if hadchangegroup:
raise NotImplementedError("can't process "
"multiple changegroups")
- cgstream = part
- version = part.params.get('version', '01')
- legalcgvers = changegroup.supportedincomingversions(self)
- if version not in legalcgvers:
- msg = _('Unsupported changegroup version: %s')
- raise error.Abort(msg % version)
- if self.bundle.compressed():
- cgstream = _writetempbundle(part.read,
- ".cg%sun" % version)
+ hadchangegroup = True
- if cgstream is None:
- raise error.Abort(_('No changegroups found'))
- cgstream.seek(0)
+ self._handlebundle2part(part)
- self.bundle = changegroup.getunbundler(version, cgstream, 'UN')
+ if not hadchangegroup:
+ raise error.Abort(_("No changegroups found"))
elif self.bundle.compressed():
- f = _writetempbundle(self.bundle.read, '.hg10un', header='HG10UN')
+ f = self._writetempbundle(self.bundle.read, '.hg10un',
+ header='HG10UN')
self.bundlefile = self.bundle = exchange.readbundle(ui, f,
bundlename,
self.vfs)
@@ -336,6 +300,37 @@
phases.retractboundary(self, None, phases.draft,
[ctx.node() for ctx in self[self.firstnewrev:]])
+ def _handlebundle2part(self, part):
+ if part.type == 'changegroup':
+ cgstream = part
+ version = part.params.get('version', '01')
+ legalcgvers = changegroup.supportedincomingversions(self)
+ if version not in legalcgvers:
+ msg = _('Unsupported changegroup version: %s')
+ raise error.Abort(msg % version)
+ if self.bundle.compressed():
+ cgstream = self._writetempbundle(part.read,
+ ".cg%sun" % version)
+
+ self.bundle = changegroup.getunbundler(version, cgstream, 'UN')
+
+ def _writetempbundle(self, readfn, suffix, header=''):
+ """Write a temporary file to disk
+ """
+ fdtemp, temp = self.vfs.mkstemp(prefix="hg-bundle-",
+ suffix=".hg10un")
+ self.tempfile = temp
+
+ with os.fdopen(fdtemp, pycompat.sysstr('wb')) as fptemp:
+ fptemp.write(header)
+ while True:
+ chunk = readfn(2**18)
+ if not chunk:
+ break
+ fptemp.write(chunk)
+
+ return self.vfs.open(self.tempfile, mode="rb")
+
@localrepo.unfilteredpropertycache
def _phasecache(self):
return bundlephasecache(self, self._phasedefaults)
--- a/mercurial/byterange.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/byterange.py Sat Sep 30 07:52:48 2017 -0700
@@ -91,7 +91,7 @@
Examples:
# expose 10 bytes, starting at byte position 20, from
# /etc/aliases.
- >>> fo = RangeableFileObject(file('/etc/passwd', 'r'), (20,30))
+ >>> fo = RangeableFileObject(file(b'/etc/passwd', b'r'), (20,30))
# seek seeks within the range (to position 23 in this case)
>>> fo.seek(3)
# tell tells where your at _within the range_ (position 3 in
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/charencode.c Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,392 @@
+/*
+ charencode.c - miscellaneous character encoding
+
+ Copyright 2008 Matt Mackall <mpm@selenic.com> and others
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+*/
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include <assert.h>
+
+#include "charencode.h"
+#include "compat.h"
+#include "util.h"
+
+#ifdef IS_PY3K
+/* The mapping of Python types is meant to be temporary to get Python
+ * 3 to compile. We should remove this once Python 3 support is complete
+ * and proper types are used in the extensions themselves. */
+#define PyInt_Type PyLong_Type
+#define PyInt_AS_LONG PyLong_AS_LONG
+#endif
+
+static const char lowertable[128] = {
+ '\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07',
+ '\x08', '\x09', '\x0a', '\x0b', '\x0c', '\x0d', '\x0e', '\x0f',
+ '\x10', '\x11', '\x12', '\x13', '\x14', '\x15', '\x16', '\x17',
+ '\x18', '\x19', '\x1a', '\x1b', '\x1c', '\x1d', '\x1e', '\x1f',
+ '\x20', '\x21', '\x22', '\x23', '\x24', '\x25', '\x26', '\x27',
+ '\x28', '\x29', '\x2a', '\x2b', '\x2c', '\x2d', '\x2e', '\x2f',
+ '\x30', '\x31', '\x32', '\x33', '\x34', '\x35', '\x36', '\x37',
+ '\x38', '\x39', '\x3a', '\x3b', '\x3c', '\x3d', '\x3e', '\x3f',
+ '\x40',
+ '\x61', '\x62', '\x63', '\x64', '\x65', '\x66', '\x67', /* A-G */
+ '\x68', '\x69', '\x6a', '\x6b', '\x6c', '\x6d', '\x6e', '\x6f', /* H-O */
+ '\x70', '\x71', '\x72', '\x73', '\x74', '\x75', '\x76', '\x77', /* P-W */
+ '\x78', '\x79', '\x7a', /* X-Z */
+ '\x5b', '\x5c', '\x5d', '\x5e', '\x5f',
+ '\x60', '\x61', '\x62', '\x63', '\x64', '\x65', '\x66', '\x67',
+ '\x68', '\x69', '\x6a', '\x6b', '\x6c', '\x6d', '\x6e', '\x6f',
+ '\x70', '\x71', '\x72', '\x73', '\x74', '\x75', '\x76', '\x77',
+ '\x78', '\x79', '\x7a', '\x7b', '\x7c', '\x7d', '\x7e', '\x7f'
+};
+
+static const char uppertable[128] = {
+ '\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07',
+ '\x08', '\x09', '\x0a', '\x0b', '\x0c', '\x0d', '\x0e', '\x0f',
+ '\x10', '\x11', '\x12', '\x13', '\x14', '\x15', '\x16', '\x17',
+ '\x18', '\x19', '\x1a', '\x1b', '\x1c', '\x1d', '\x1e', '\x1f',
+ '\x20', '\x21', '\x22', '\x23', '\x24', '\x25', '\x26', '\x27',
+ '\x28', '\x29', '\x2a', '\x2b', '\x2c', '\x2d', '\x2e', '\x2f',
+ '\x30', '\x31', '\x32', '\x33', '\x34', '\x35', '\x36', '\x37',
+ '\x38', '\x39', '\x3a', '\x3b', '\x3c', '\x3d', '\x3e', '\x3f',
+ '\x40', '\x41', '\x42', '\x43', '\x44', '\x45', '\x46', '\x47',
+ '\x48', '\x49', '\x4a', '\x4b', '\x4c', '\x4d', '\x4e', '\x4f',
+ '\x50', '\x51', '\x52', '\x53', '\x54', '\x55', '\x56', '\x57',
+ '\x58', '\x59', '\x5a', '\x5b', '\x5c', '\x5d', '\x5e', '\x5f',
+ '\x60',
+ '\x41', '\x42', '\x43', '\x44', '\x45', '\x46', '\x47', /* a-g */
+ '\x48', '\x49', '\x4a', '\x4b', '\x4c', '\x4d', '\x4e', '\x4f', /* h-o */
+ '\x50', '\x51', '\x52', '\x53', '\x54', '\x55', '\x56', '\x57', /* p-w */
+ '\x58', '\x59', '\x5a', /* x-z */
+ '\x7b', '\x7c', '\x7d', '\x7e', '\x7f'
+};
+
+/* 1: no escape, 2: \<c>, 6: \u<x> */
+static const uint8_t jsonlentable[256] = {
+ 6, 6, 6, 6, 6, 6, 6, 6, 2, 2, 2, 6, 2, 2, 6, 6, /* b, t, n, f, r */
+ 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
+ 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* " */
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, /* \\ */
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 6, /* DEL */
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+};
+
+static const uint8_t jsonparanoidlentable[128] = {
+ 6, 6, 6, 6, 6, 6, 6, 6, 2, 2, 2, 6, 2, 2, 6, 6, /* b, t, n, f, r */
+ 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
+ 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* " */
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 6, 1, 6, 1, /* <, > */
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, /* \\ */
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 6, /* DEL */
+};
+
+static const char hexchartable[16] = {
+ '0', '1', '2', '3', '4', '5', '6', '7',
+ '8', '9', 'a', 'b', 'c', 'd', 'e', 'f',
+};
+
+/*
+ * Turn a hex-encoded string into binary.
+ */
+PyObject *unhexlify(const char *str, Py_ssize_t len)
+{
+ PyObject *ret;
+ char *d;
+ Py_ssize_t i;
+
+ ret = PyBytes_FromStringAndSize(NULL, len / 2);
+
+ if (!ret)
+ return NULL;
+
+ d = PyBytes_AsString(ret);
+
+ for (i = 0; i < len;) {
+ int hi = hexdigit(str, i++);
+ int lo = hexdigit(str, i++);
+ *d++ = (hi << 4) | lo;
+ }
+
+ return ret;
+}
+
+PyObject *isasciistr(PyObject *self, PyObject *args)
+{
+ const char *buf;
+ Py_ssize_t i, len;
+ if (!PyArg_ParseTuple(args, "s#:isasciistr", &buf, &len))
+ return NULL;
+ i = 0;
+ /* char array in PyStringObject should be at least 4-byte aligned */
+ if (((uintptr_t)buf & 3) == 0) {
+ const uint32_t *p = (const uint32_t *)buf;
+ for (; i < len / 4; i++) {
+ if (p[i] & 0x80808080U)
+ Py_RETURN_FALSE;
+ }
+ i *= 4;
+ }
+ for (; i < len; i++) {
+ if (buf[i] & 0x80)
+ Py_RETURN_FALSE;
+ }
+ Py_RETURN_TRUE;
+}
+
+static inline PyObject *_asciitransform(PyObject *str_obj,
+ const char table[128],
+ PyObject *fallback_fn)
+{
+ char *str, *newstr;
+ Py_ssize_t i, len;
+ PyObject *newobj = NULL;
+ PyObject *ret = NULL;
+
+ str = PyBytes_AS_STRING(str_obj);
+ len = PyBytes_GET_SIZE(str_obj);
+
+ newobj = PyBytes_FromStringAndSize(NULL, len);
+ if (!newobj)
+ goto quit;
+
+ newstr = PyBytes_AS_STRING(newobj);
+
+ for (i = 0; i < len; i++) {
+ char c = str[i];
+ if (c & 0x80) {
+ if (fallback_fn != NULL) {
+ ret = PyObject_CallFunctionObjArgs(fallback_fn,
+ str_obj, NULL);
+ } else {
+ PyObject *err = PyUnicodeDecodeError_Create(
+ "ascii", str, len, i, (i + 1),
+ "unexpected code byte");
+ PyErr_SetObject(PyExc_UnicodeDecodeError, err);
+ Py_XDECREF(err);
+ }
+ goto quit;
+ }
+ newstr[i] = table[(unsigned char)c];
+ }
+
+ ret = newobj;
+ Py_INCREF(ret);
+quit:
+ Py_XDECREF(newobj);
+ return ret;
+}
+
+PyObject *asciilower(PyObject *self, PyObject *args)
+{
+ PyObject *str_obj;
+ if (!PyArg_ParseTuple(args, "O!:asciilower", &PyBytes_Type, &str_obj))
+ return NULL;
+ return _asciitransform(str_obj, lowertable, NULL);
+}
+
+PyObject *asciiupper(PyObject *self, PyObject *args)
+{
+ PyObject *str_obj;
+ if (!PyArg_ParseTuple(args, "O!:asciiupper", &PyBytes_Type, &str_obj))
+ return NULL;
+ return _asciitransform(str_obj, uppertable, NULL);
+}
+
+PyObject *make_file_foldmap(PyObject *self, PyObject *args)
+{
+ PyObject *dmap, *spec_obj, *normcase_fallback;
+ PyObject *file_foldmap = NULL;
+ enum normcase_spec spec;
+ PyObject *k, *v;
+ dirstateTupleObject *tuple;
+ Py_ssize_t pos = 0;
+ const char *table;
+
+ if (!PyArg_ParseTuple(args, "O!O!O!:make_file_foldmap",
+ &PyDict_Type, &dmap,
+ &PyInt_Type, &spec_obj,
+ &PyFunction_Type, &normcase_fallback))
+ goto quit;
+
+ spec = (int)PyInt_AS_LONG(spec_obj);
+ switch (spec) {
+ case NORMCASE_LOWER:
+ table = lowertable;
+ break;
+ case NORMCASE_UPPER:
+ table = uppertable;
+ break;
+ case NORMCASE_OTHER:
+ table = NULL;
+ break;
+ default:
+ PyErr_SetString(PyExc_TypeError, "invalid normcasespec");
+ goto quit;
+ }
+
+ /* Add some more entries to deal with additions outside this
+ function. */
+ file_foldmap = _dict_new_presized((PyDict_Size(dmap) / 10) * 11);
+ if (file_foldmap == NULL)
+ goto quit;
+
+ while (PyDict_Next(dmap, &pos, &k, &v)) {
+ if (!dirstate_tuple_check(v)) {
+ PyErr_SetString(PyExc_TypeError,
+ "expected a dirstate tuple");
+ goto quit;
+ }
+
+ tuple = (dirstateTupleObject *)v;
+ if (tuple->state != 'r') {
+ PyObject *normed;
+ if (table != NULL) {
+ normed = _asciitransform(k, table,
+ normcase_fallback);
+ } else {
+ normed = PyObject_CallFunctionObjArgs(
+ normcase_fallback, k, NULL);
+ }
+
+ if (normed == NULL)
+ goto quit;
+ if (PyDict_SetItem(file_foldmap, normed, k) == -1) {
+ Py_DECREF(normed);
+ goto quit;
+ }
+ Py_DECREF(normed);
+ }
+ }
+ return file_foldmap;
+quit:
+ Py_XDECREF(file_foldmap);
+ return NULL;
+}
+
+/* calculate length of JSON-escaped string; returns -1 if unsupported */
+static Py_ssize_t jsonescapelen(const char *buf, Py_ssize_t len, bool paranoid)
+{
+ Py_ssize_t i, esclen = 0;
+
+ if (paranoid) {
+ /* don't want to process multi-byte escapes in C */
+ for (i = 0; i < len; i++) {
+ char c = buf[i];
+ if (c & 0x80) {
+ PyErr_SetString(PyExc_ValueError,
+ "cannot process non-ascii str");
+ return -1;
+ }
+ esclen += jsonparanoidlentable[(unsigned char)c];
+ if (esclen < 0) {
+ PyErr_SetString(PyExc_MemoryError,
+ "overflow in jsonescapelen");
+ return -1;
+ }
+ }
+ } else {
+ for (i = 0; i < len; i++) {
+ char c = buf[i];
+ esclen += jsonlentable[(unsigned char)c];
+ if (esclen < 0) {
+ PyErr_SetString(PyExc_MemoryError,
+ "overflow in jsonescapelen");
+ return -1;
+ }
+ }
+ }
+
+ return esclen;
+}
+
+/* map '\<c>' escape character */
+static char jsonescapechar2(char c)
+{
+ switch (c) {
+ case '\b': return 'b';
+ case '\t': return 't';
+ case '\n': return 'n';
+ case '\f': return 'f';
+ case '\r': return 'r';
+ case '"': return '"';
+ case '\\': return '\\';
+ }
+ return '\0'; /* should not happen */
+}
+
+/* convert 'origbuf' to JSON-escaped form 'escbuf'; 'origbuf' should only
+ include characters mappable by json(paranoid)lentable */
+static void encodejsonescape(char *escbuf, Py_ssize_t esclen,
+ const char *origbuf, Py_ssize_t origlen,
+ bool paranoid)
+{
+ const uint8_t *lentable =
+ (paranoid) ? jsonparanoidlentable : jsonlentable;
+ Py_ssize_t i, j;
+
+ for (i = 0, j = 0; i < origlen; i++) {
+ char c = origbuf[i];
+ uint8_t l = lentable[(unsigned char)c];
+ assert(j + l <= esclen);
+ switch (l) {
+ case 1:
+ escbuf[j] = c;
+ break;
+ case 2:
+ escbuf[j] = '\\';
+ escbuf[j + 1] = jsonescapechar2(c);
+ break;
+ case 6:
+ memcpy(escbuf + j, "\\u00", 4);
+ escbuf[j + 4] = hexchartable[(unsigned char)c >> 4];
+ escbuf[j + 5] = hexchartable[(unsigned char)c & 0xf];
+ break;
+ }
+ j += l;
+ }
+}
+
+PyObject *jsonescapeu8fast(PyObject *self, PyObject *args)
+{
+ PyObject *origstr, *escstr;
+ const char *origbuf;
+ Py_ssize_t origlen, esclen;
+ int paranoid;
+ if (!PyArg_ParseTuple(args, "O!i:jsonescapeu8fast",
+ &PyBytes_Type, &origstr, &paranoid))
+ return NULL;
+
+ origbuf = PyBytes_AS_STRING(origstr);
+ origlen = PyBytes_GET_SIZE(origstr);
+ esclen = jsonescapelen(origbuf, origlen, paranoid);
+ if (esclen < 0)
+ return NULL; /* unsupported char found or overflow */
+ if (origlen == esclen) {
+ Py_INCREF(origstr);
+ return origstr;
+ }
+
+ escstr = PyBytes_FromStringAndSize(NULL, esclen);
+ if (!escstr)
+ return NULL;
+ encodejsonescape(PyBytes_AS_STRING(escstr), esclen, origbuf, origlen,
+ paranoid);
+
+ return escstr;
+}
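For readers skimming the C: jsonescapelen() classifies every byte as costing 1 (copied as-is), 2 (a \<c> escape) or 6 (a \u00XX escape) output bytes, and encodejsonescape() then fills a buffer of exactly that size; when nothing needs escaping, the original string is returned untouched. A rough pure-Python equivalent of the non-paranoid path, assuming ASCII input (an illustration only, not the code Mercurial ships):

    _ESC2 = {8: 'b', 9: 't', 10: 'n', 12: 'f', 13: 'r', 34: '"', 92: '\\'}

    def _esclen(c):
        # mirrors jsonlentable: 1 = copy, 2 = "\<c>", 6 = "\u00XX"
        if c in _ESC2:
            return 2
        if c < 0x20 or c == 0x7f:
            return 6
        return 1

    def jsonescape(data):
        src = bytearray(data)
        if all(_esclen(c) == 1 for c in src):
            return data.decode('ascii')      # fast path: nothing to escape
        out = []
        for c in src:
            n = _esclen(c)
            if n == 2:
                out.append('\\' + _ESC2[c])
            elif n == 6:
                out.append('\\u%04x' % c)
            else:
                out.append(chr(c))
        return ''.join(out)

    print(jsonescape(b'tab\there "quoted"'))   # tab\there \"quoted\"
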
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/charencode.h Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,59 @@
+/*
+ charencode.h - miscellaneous character encoding
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+*/
+
+#ifndef _HG_CHARENCODE_H_
+#define _HG_CHARENCODE_H_
+
+#include <Python.h>
+#include "compat.h"
+
+/* This should be kept in sync with normcasespecs in encoding.py. */
+enum normcase_spec {
+ NORMCASE_LOWER = -1,
+ NORMCASE_UPPER = 1,
+ NORMCASE_OTHER = 0
+};
+
+PyObject *unhexlify(const char *str, Py_ssize_t len);
+PyObject *isasciistr(PyObject *self, PyObject *args);
+PyObject *asciilower(PyObject *self, PyObject *args);
+PyObject *asciiupper(PyObject *self, PyObject *args);
+PyObject *make_file_foldmap(PyObject *self, PyObject *args);
+PyObject *jsonescapeu8fast(PyObject *self, PyObject *args);
+
+static const int8_t hextable[256] = {
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1, /* 0-9 */
+ -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, /* A-F */
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, /* a-f */
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1
+};
+
+static inline int hexdigit(const char *p, Py_ssize_t off)
+{
+ int8_t val = hextable[(unsigned char)p[off]];
+
+ if (val >= 0) {
+ return val;
+ }
+
+ PyErr_SetString(PyExc_ValueError, "input contains non-hex character");
+ return 0;
+}
+
+#endif /* _HG_CHARENCODE_H_ */
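The header also becomes the shared home of hextable and hexdigit(), which unhexlify() builds on: each hex character maps to a nibble, and two nibbles are packed per output byte. A tiny Python sketch of the same loop, assuming well-formed hex input (the C version instead reports bad characters via PyErr_SetString):

    HEXDIGITS = '0123456789abcdef'

    def unhexlify(s):
        out = bytearray()
        for i in range(0, len(s), 2):
            hi = HEXDIGITS.index(s[i].lower())      # high nibble
            lo = HEXDIGITS.index(s[i + 1].lower())  # low nibble
            out.append((hi << 4) | lo)
        return bytes(out)

    print(unhexlify('48656c6c6f'))   # b'Hello'
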
--- a/mercurial/cext/manifest.c Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/cext/manifest.c Sat Sep 30 07:52:48 2017 -0700
@@ -12,6 +12,7 @@
#include <string.h>
#include <stdlib.h>
+#include "charencode.h"
#include "util.h"
#define DEFAULT_LINES 100000
@@ -38,9 +39,6 @@
#define MANIFEST_NOT_SORTED -2
#define MANIFEST_MALFORMED -3
-/* defined in parsers.c */
-PyObject *unhexlify(const char *str, int len);
-
/* get the length of the path for a line */
static size_t pathlen(line *l) {
return strlen(l->start);
--- a/mercurial/cext/parsers.c Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/cext/parsers.c Sat Sep 30 07:52:48 2017 -0700
@@ -12,6 +12,7 @@
#include <stddef.h>
#include <string.h>
+#include "charencode.h"
#include "util.h"
#include "bitmanipulation.h"
@@ -19,154 +20,14 @@
/* The mapping of Python types is meant to be temporary to get Python
* 3 to compile. We should remove this once Python 3 support is fully
* supported and proper types are used in the extensions themselves. */
-#define PyInt_Type PyLong_Type
#define PyInt_Check PyLong_Check
#define PyInt_FromLong PyLong_FromLong
#define PyInt_FromSsize_t PyLong_FromSsize_t
-#define PyInt_AS_LONG PyLong_AS_LONG
#define PyInt_AsLong PyLong_AsLong
#endif
static const char *const versionerrortext = "Python minor version mismatch";
-static const char lowertable[128] = {
- '\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07',
- '\x08', '\x09', '\x0a', '\x0b', '\x0c', '\x0d', '\x0e', '\x0f',
- '\x10', '\x11', '\x12', '\x13', '\x14', '\x15', '\x16', '\x17',
- '\x18', '\x19', '\x1a', '\x1b', '\x1c', '\x1d', '\x1e', '\x1f',
- '\x20', '\x21', '\x22', '\x23', '\x24', '\x25', '\x26', '\x27',
- '\x28', '\x29', '\x2a', '\x2b', '\x2c', '\x2d', '\x2e', '\x2f',
- '\x30', '\x31', '\x32', '\x33', '\x34', '\x35', '\x36', '\x37',
- '\x38', '\x39', '\x3a', '\x3b', '\x3c', '\x3d', '\x3e', '\x3f',
- '\x40',
- '\x61', '\x62', '\x63', '\x64', '\x65', '\x66', '\x67', /* A-G */
- '\x68', '\x69', '\x6a', '\x6b', '\x6c', '\x6d', '\x6e', '\x6f', /* H-O */
- '\x70', '\x71', '\x72', '\x73', '\x74', '\x75', '\x76', '\x77', /* P-W */
- '\x78', '\x79', '\x7a', /* X-Z */
- '\x5b', '\x5c', '\x5d', '\x5e', '\x5f',
- '\x60', '\x61', '\x62', '\x63', '\x64', '\x65', '\x66', '\x67',
- '\x68', '\x69', '\x6a', '\x6b', '\x6c', '\x6d', '\x6e', '\x6f',
- '\x70', '\x71', '\x72', '\x73', '\x74', '\x75', '\x76', '\x77',
- '\x78', '\x79', '\x7a', '\x7b', '\x7c', '\x7d', '\x7e', '\x7f'
-};
-
-static const char uppertable[128] = {
- '\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07',
- '\x08', '\x09', '\x0a', '\x0b', '\x0c', '\x0d', '\x0e', '\x0f',
- '\x10', '\x11', '\x12', '\x13', '\x14', '\x15', '\x16', '\x17',
- '\x18', '\x19', '\x1a', '\x1b', '\x1c', '\x1d', '\x1e', '\x1f',
- '\x20', '\x21', '\x22', '\x23', '\x24', '\x25', '\x26', '\x27',
- '\x28', '\x29', '\x2a', '\x2b', '\x2c', '\x2d', '\x2e', '\x2f',
- '\x30', '\x31', '\x32', '\x33', '\x34', '\x35', '\x36', '\x37',
- '\x38', '\x39', '\x3a', '\x3b', '\x3c', '\x3d', '\x3e', '\x3f',
- '\x40', '\x41', '\x42', '\x43', '\x44', '\x45', '\x46', '\x47',
- '\x48', '\x49', '\x4a', '\x4b', '\x4c', '\x4d', '\x4e', '\x4f',
- '\x50', '\x51', '\x52', '\x53', '\x54', '\x55', '\x56', '\x57',
- '\x58', '\x59', '\x5a', '\x5b', '\x5c', '\x5d', '\x5e', '\x5f',
- '\x60',
- '\x41', '\x42', '\x43', '\x44', '\x45', '\x46', '\x47', /* a-g */
- '\x48', '\x49', '\x4a', '\x4b', '\x4c', '\x4d', '\x4e', '\x4f', /* h-o */
- '\x50', '\x51', '\x52', '\x53', '\x54', '\x55', '\x56', '\x57', /* p-w */
- '\x58', '\x59', '\x5a', /* x-z */
- '\x7b', '\x7c', '\x7d', '\x7e', '\x7f'
-};
-
-/*
- * Turn a hex-encoded string into binary.
- */
-PyObject *unhexlify(const char *str, int len)
-{
- PyObject *ret;
- char *d;
- int i;
-
- ret = PyBytes_FromStringAndSize(NULL, len / 2);
-
- if (!ret)
- return NULL;
-
- d = PyBytes_AsString(ret);
-
- for (i = 0; i < len;) {
- int hi = hexdigit(str, i++);
- int lo = hexdigit(str, i++);
- *d++ = (hi << 4) | lo;
- }
-
- return ret;
-}
-
-static inline PyObject *_asciitransform(PyObject *str_obj,
- const char table[128],
- PyObject *fallback_fn)
-{
- char *str, *newstr;
- Py_ssize_t i, len;
- PyObject *newobj = NULL;
- PyObject *ret = NULL;
-
- str = PyBytes_AS_STRING(str_obj);
- len = PyBytes_GET_SIZE(str_obj);
-
- newobj = PyBytes_FromStringAndSize(NULL, len);
- if (!newobj)
- goto quit;
-
- newstr = PyBytes_AS_STRING(newobj);
-
- for (i = 0; i < len; i++) {
- char c = str[i];
- if (c & 0x80) {
- if (fallback_fn != NULL) {
- ret = PyObject_CallFunctionObjArgs(fallback_fn,
- str_obj, NULL);
- } else {
- PyObject *err = PyUnicodeDecodeError_Create(
- "ascii", str, len, i, (i + 1),
- "unexpected code byte");
- PyErr_SetObject(PyExc_UnicodeDecodeError, err);
- Py_XDECREF(err);
- }
- goto quit;
- }
- newstr[i] = table[(unsigned char)c];
- }
-
- ret = newobj;
- Py_INCREF(ret);
-quit:
- Py_XDECREF(newobj);
- return ret;
-}
-
-static PyObject *asciilower(PyObject *self, PyObject *args)
-{
- PyObject *str_obj;
- if (!PyArg_ParseTuple(args, "O!:asciilower", &PyBytes_Type, &str_obj))
- return NULL;
- return _asciitransform(str_obj, lowertable, NULL);
-}
-
-static PyObject *asciiupper(PyObject *self, PyObject *args)
-{
- PyObject *str_obj;
- if (!PyArg_ParseTuple(args, "O!:asciiupper", &PyBytes_Type, &str_obj))
- return NULL;
- return _asciitransform(str_obj, uppertable, NULL);
-}
-
-static inline PyObject *_dict_new_presized(Py_ssize_t expected_size)
-{
- /* _PyDict_NewPresized expects a minused parameter, but it actually
- creates a dictionary that's the nearest power of two bigger than the
- parameter. For example, with the initial minused = 1000, the
- dictionary created has size 1024. Of course in a lot of cases that
- can be greater than the maximum load factor Python's dict object
- expects (= 2/3), so as soon as we cross the threshold we'll resize
- anyway. So create a dictionary that's at least 3/2 the size. */
- return _PyDict_NewPresized(((1 + expected_size) / 2) * 3);
-}
-
static PyObject *dict_new_presized(PyObject *self, PyObject *args)
{
Py_ssize_t expected_size;
@@ -177,77 +38,6 @@
return _dict_new_presized(expected_size);
}
-static PyObject *make_file_foldmap(PyObject *self, PyObject *args)
-{
- PyObject *dmap, *spec_obj, *normcase_fallback;
- PyObject *file_foldmap = NULL;
- enum normcase_spec spec;
- PyObject *k, *v;
- dirstateTupleObject *tuple;
- Py_ssize_t pos = 0;
- const char *table;
-
- if (!PyArg_ParseTuple(args, "O!O!O!:make_file_foldmap",
- &PyDict_Type, &dmap,
- &PyInt_Type, &spec_obj,
- &PyFunction_Type, &normcase_fallback))
- goto quit;
-
- spec = (int)PyInt_AS_LONG(spec_obj);
- switch (spec) {
- case NORMCASE_LOWER:
- table = lowertable;
- break;
- case NORMCASE_UPPER:
- table = uppertable;
- break;
- case NORMCASE_OTHER:
- table = NULL;
- break;
- default:
- PyErr_SetString(PyExc_TypeError, "invalid normcasespec");
- goto quit;
- }
-
- /* Add some more entries to deal with additions outside this
- function. */
- file_foldmap = _dict_new_presized((PyDict_Size(dmap) / 10) * 11);
- if (file_foldmap == NULL)
- goto quit;
-
- while (PyDict_Next(dmap, &pos, &k, &v)) {
- if (!dirstate_tuple_check(v)) {
- PyErr_SetString(PyExc_TypeError,
- "expected a dirstate tuple");
- goto quit;
- }
-
- tuple = (dirstateTupleObject *)v;
- if (tuple->state != 'r') {
- PyObject *normed;
- if (table != NULL) {
- normed = _asciitransform(k, table,
- normcase_fallback);
- } else {
- normed = PyObject_CallFunctionObjArgs(
- normcase_fallback, k, NULL);
- }
-
- if (normed == NULL)
- goto quit;
- if (PyDict_SetItem(file_foldmap, normed, k) == -1) {
- Py_DECREF(normed);
- goto quit;
- }
- Py_DECREF(normed);
- }
- }
- return file_foldmap;
-quit:
- Py_XDECREF(file_foldmap);
- return NULL;
-}
-
/*
* This code assumes that a manifest is stitched together with newline
* ('\n') characters.
@@ -293,7 +83,7 @@
nlen = newline - zero - 1;
- node = unhexlify(zero + 1, nlen > 40 ? 40 : (int)nlen);
+ node = unhexlify(zero + 1, nlen > 40 ? 40 : (Py_ssize_t)nlen);
if (!node)
goto bail;
@@ -906,12 +696,15 @@
{"parse_manifest", parse_manifest, METH_VARARGS, "parse a manifest\n"},
{"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
{"parse_index2", parse_index2, METH_VARARGS, "parse a revlog index\n"},
+ {"isasciistr", isasciistr, METH_VARARGS, "check if a string is ASCII\n"},
{"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"},
{"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"},
{"dict_new_presized", dict_new_presized, METH_VARARGS,
"construct a dict with an expected size\n"},
{"make_file_foldmap", make_file_foldmap, METH_VARARGS,
"make file foldmap\n"},
+ {"jsonescapeu8fast", jsonescapeu8fast, METH_VARARGS,
+ "escape a UTF-8 byte string to JSON (fast path)\n"},
{"encodedir", encodedir, METH_VARARGS, "encodedir a path\n"},
{"pathencode", pathencode, METH_VARARGS, "fncache-encode a path\n"},
{"lowerencode", lowerencode, METH_VARARGS, "lower-encode a path\n"},
@@ -924,7 +717,7 @@
void manifest_module_init(PyObject *mod);
void revlog_module_init(PyObject *mod);
-static const int version = 1;
+static const int version = 3;
static void module_init(PyObject *mod)
{
--- a/mercurial/cext/revlog.c Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/cext/revlog.c Sat Sep 30 07:52:48 2017 -0700
@@ -13,6 +13,7 @@
#include <stddef.h>
#include <string.h>
+#include "charencode.h"
#include "util.h"
#include "bitmanipulation.h"
--- a/mercurial/cext/util.h Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/cext/util.h Sat Sep 30 07:52:48 2017 -0700
@@ -25,13 +25,6 @@
extern PyTypeObject dirstateTupleType;
#define dirstate_tuple_check(op) (Py_TYPE(op) == &dirstateTupleType)
-/* This should be kept in sync with normcasespecs in encoding.py. */
-enum normcase_spec {
- NORMCASE_LOWER = -1,
- NORMCASE_UPPER = 1,
- NORMCASE_OTHER = 0
-};
-
#define MIN(a, b) (((a)<(b))?(a):(b))
/* VC9 doesn't include bool and lacks stdbool.h based on my searching */
#if defined(_MSC_VER) || __STDC_VERSION__ < 199901L
@@ -42,35 +35,16 @@
#include <stdbool.h>
#endif
-static const int8_t hextable[256] = {
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1, /* 0-9 */
- -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, /* A-F */
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, /* a-f */
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1
-};
-
-static inline int hexdigit(const char *p, Py_ssize_t off)
+static inline PyObject *_dict_new_presized(Py_ssize_t expected_size)
{
- int8_t val = hextable[(unsigned char)p[off]];
-
- if (val >= 0) {
- return val;
- }
-
- PyErr_SetString(PyExc_ValueError, "input contains non-hex character");
- return 0;
+ /* _PyDict_NewPresized expects a minused parameter, but it actually
+ creates a dictionary that's the nearest power of two bigger than the
+ parameter. For example, with the initial minused = 1000, the
+ dictionary created has size 1024. Of course in a lot of cases that
+ can be greater than the maximum load factor Python's dict object
+ expects (= 2/3), so as soon as we cross the threshold we'll resize
+ anyway. So create a dictionary that's at least 3/2 the size. */
+ return _PyDict_NewPresized(((1 + expected_size) / 2) * 3);
}
#endif /* _HG_UTIL_H_ */
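The presizing comment deserves a quick arithmetic check. Assuming power-of-two table sizes and the 2/3 maximum load factor it mentions, asking for roughly 3/2 of the expected entry count is what keeps the new dict from resizing as soon as it is filled:

    def table_slots(minused):
        # rough model of _PyDict_NewPresized: next power of two above minused
        size = 8
        while size <= minused:
            size *= 2
        return size

    expected = 1000
    naive = table_slots(expected)                    # 1024 slots
    padded = table_slots(((1 + expected) // 2) * 3)  # request 1501 -> 2048

    print(naive, naive * 2 // 3)    # 1024 682  -> resizes before 1000 items
    print(padded, padded * 2 // 3)  # 2048 1365 -> comfortably holds 1000
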
--- a/mercurial/cffi/base85.py Mon Sep 18 10:54:00 2017 -0700
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-# base85.py: pure python base85 codec
-#
-# Copyright (C) 2009 Brendan Cully <brendan@kublai.com>
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-from __future__ import absolute_import
-
-from ..pure.base85 import *
--- a/mercurial/cffi/diffhelpers.py Mon Sep 18 10:54:00 2017 -0700
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-# diffhelpers.py - pure Python implementation of diffhelpers.c
-#
-# Copyright 2009 Matt Mackall <mpm@selenic.com> and others
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-from __future__ import absolute_import
-
-from ..pure.diffhelpers import *
--- a/mercurial/cffi/parsers.py Mon Sep 18 10:54:00 2017 -0700
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-# parsers.py - Python implementation of parsers.c
-#
-# Copyright 2009 Matt Mackall <mpm@selenic.com> and others
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-from __future__ import absolute_import
-
-from ..pure.parsers import *
--- a/mercurial/changegroup.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/changegroup.py Sat Sep 30 07:52:48 2017 -0700
@@ -21,7 +21,6 @@
from . import (
dagutil,
- discovery,
error,
mdiff,
phases,
@@ -189,8 +188,7 @@
header = struct.unpack(self.deltaheader, headerdata)
delta = readexactly(self._stream, l - self.deltaheadersize)
node, p1, p2, deltabase, cs, flags = self._deltaheader(header, prevnode)
- return {'node': node, 'p1': p1, 'p2': p2, 'cs': cs,
- 'deltabase': deltabase, 'delta': delta, 'flags': flags}
+ return (node, p1, p2, cs, deltabase, delta, flags)
def getchunks(self):
"""returns all the chunks contains in the bundle
@@ -199,23 +197,36 @@
network API. To do so, it parse the changegroup data, otherwise it will
block in case of sshrepo because it don't know the end of the stream.
"""
- # an empty chunkgroup is the end of the changegroup
- # a changegroup has at least 2 chunkgroups (changelog and manifest).
- # after that, changegroup versions 1 and 2 have a series of groups
- # with one group per file. changegroup 3 has a series of directory
- # manifests before the files.
- count = 0
- emptycount = 0
- while emptycount < self._grouplistcount:
- empty = True
- count += 1
+ # For changegroup 1 and 2, we expect 3 parts: changelog, manifestlog,
+ # and a list of filelogs. For changegroup 3, we expect 4 parts:
+ # changelog, manifestlog, a list of tree manifestlogs, and a list of
+ # filelogs.
+ #
+ # Changelog and manifestlog parts are terminated with empty chunks. The
+ # tree and file parts are a list of entry sections. Each entry section
+ # is a series of chunks terminating in an empty chunk. The list of these
+ # entry sections is terminated in yet another empty chunk, so we know
+ # we've reached the end of the tree/file list when we reach an empty
+ # chunk that was preceded by no non-empty chunks.
+
+ parts = 0
+ while parts < 2 + self._grouplistcount:
+ noentries = True
while True:
chunk = getchunk(self)
if not chunk:
- if empty and count > 2:
- emptycount += 1
+ # The first two empty chunks represent the end of the
+ # changelog and the manifestlog portions. The remaining
+ # empty chunks represent either A) the end of individual
+ # tree or file entries in the file list, or B) the end of
+ # the entire list. It's the end of the entire list if there
+ # were no entries (i.e. noentries is True).
+ if parts < 2:
+ parts += 1
+ elif noentries:
+ parts += 1
break
- empty = False
+ noentries = False
yield chunkheader(len(chunk))
pos = 0
while pos < len(chunk):
@@ -233,7 +244,8 @@
# no new manifest will be created and the manifest group will
# be empty during the pull
self.manifestheader()
- repo.manifestlog._revlog.addgroup(self, revmap, trp)
+ deltas = self.deltaiter()
+ repo.manifestlog._revlog.addgroup(deltas, revmap, trp)
repo.ui.progress(_('manifests'), None)
self.callback = None
@@ -266,7 +278,8 @@
# in this function.
srctype = tr.hookargs.setdefault('source', srctype)
url = tr.hookargs.setdefault('url', url)
- repo.hook('prechangegroup', throw=True, **tr.hookargs)
+ repo.hook('prechangegroup',
+ throw=True, **pycompat.strkwargs(tr.hookargs))
# write changelog data to temp files so concurrent readers
# will not see an inconsistent view
@@ -294,7 +307,8 @@
efiles.update(cl.readfiles(node))
self.changelogheader()
- cgnodes = cl.addgroup(self, csmap, trp, addrevisioncb=onchangelog)
+ deltas = self.deltaiter()
+ cgnodes = cl.addgroup(deltas, csmap, trp, addrevisioncb=onchangelog)
efiles = len(efiles)
if not cgnodes:
@@ -353,7 +367,8 @@
hookargs = dict(tr.hookargs)
hookargs['node'] = hex(cl.node(clstart))
hookargs['node_last'] = hex(cl.node(clend - 1))
- repo.hook('pretxnchangegroup', throw=True, **hookargs)
+ repo.hook('pretxnchangegroup',
+ throw=True, **pycompat.strkwargs(hookargs))
added = [cl.node(r) for r in xrange(clstart, clend)]
phaseall = None
@@ -388,13 +403,13 @@
if clstart >= len(repo):
return
- repo.hook("changegroup", **hookargs)
+ repo.hook("changegroup", **pycompat.strkwargs(hookargs))
for n in added:
args = hookargs.copy()
args['node'] = hex(n)
del args['node_last']
- repo.hook("incoming", **args)
+ repo.hook("incoming", **pycompat.strkwargs(args))
newheads = [h for h in repo.heads()
if h not in oldheads]
@@ -414,6 +429,18 @@
ret = deltaheads + 1
return ret
+ def deltaiter(self):
+ """
+ returns an iterator of the deltas in this changegroup
+
+ Useful for passing to the underlying storage system to be stored.
+ """
+ chain = None
+ for chunkdata in iter(lambda: self.deltachunk(chain), {}):
+ # Chunkdata: (node, p1, p2, cs, deltabase, delta, flags)
+ yield chunkdata
+ chain = chunkdata[0]
+
class cg2unpacker(cg1unpacker):
"""Unpacker for cg2 streams.
@@ -454,7 +481,8 @@
d = chunkdata["filename"]
repo.ui.debug("adding %s revisions\n" % d)
dirlog = repo.manifestlog._revlog.dirlog(d)
- if not dirlog.addgroup(self, revmap, trp):
+ deltas = self.deltaiter()
+ if not dirlog.addgroup(deltas, revmap, trp):
raise error.Abort(_("received dir revlog group is empty"))
class headerlessfixup(object):
@@ -624,7 +652,7 @@
'treemanifest' not in repo.requirements)
for chunk in self.generatemanifests(commonrevs, clrevorder,
- fastpathlinkrev, mfs, fnodes):
+ fastpathlinkrev, mfs, fnodes, source):
yield chunk
mfs.clear()
clrevs = set(cl.rev(x) for x in clnodes)
@@ -650,7 +678,12 @@
repo.hook('outgoing', node=hex(clnodes[0]), source=source)
def generatemanifests(self, commonrevs, clrevorder, fastpathlinkrev, mfs,
- fnodes):
+ fnodes, source):
+ """Returns an iterator of changegroup chunks containing manifests.
+
+ `source` is unused here, but is used by extensions like remotefilelog to
+ change what is sent based on pulls vs. pushes, etc.
+ """
repo = self._repo
mfl = repo.manifestlog
dirlog = mfl._revlog.dirlog
@@ -902,7 +935,17 @@
for node in nodes:
repo.ui.debug("%s\n" % hex(node))
-def getsubsetraw(repo, outgoing, bundler, source, fastpath=False):
+def makechangegroup(repo, outgoing, version, source, fastpath=False,
+ bundlecaps=None):
+ cgstream = makestream(repo, outgoing, version, source,
+ fastpath=fastpath, bundlecaps=bundlecaps)
+ return getunbundler(version, util.chunkbuffer(cgstream), None,
+ {'clcount': len(outgoing.missing) })
+
+def makestream(repo, outgoing, version, source, fastpath=False,
+ bundlecaps=None):
+ bundler = getbundler(version, repo, bundlecaps=bundlecaps)
+
repo = repo.unfiltered()
commonrevs = outgoing.common
csets = outgoing.missing
@@ -918,59 +961,6 @@
_changegroupinfo(repo, csets, source)
return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
-def getsubset(repo, outgoing, bundler, source, fastpath=False):
- gengroup = getsubsetraw(repo, outgoing, bundler, source, fastpath)
- return getunbundler(bundler.version, util.chunkbuffer(gengroup), None,
- {'clcount': len(outgoing.missing)})
-
-def changegroupsubset(repo, roots, heads, source, version='01'):
- """Compute a changegroup consisting of all the nodes that are
- descendants of any of the roots and ancestors of any of the heads.
- Return a chunkbuffer object whose read() method will return
- successive changegroup chunks.
-
- It is fairly complex as determining which filenodes and which
- manifest nodes need to be included for the changeset to be complete
- is non-trivial.
-
- Another wrinkle is doing the reverse, figuring out which changeset in
- the changegroup a particular filenode or manifestnode belongs to.
- """
- outgoing = discovery.outgoing(repo, missingroots=roots, missingheads=heads)
- bundler = getbundler(version, repo)
- return getsubset(repo, outgoing, bundler, source)
-
-def getlocalchangegroupraw(repo, source, outgoing, bundlecaps=None,
- version='01'):
- """Like getbundle, but taking a discovery.outgoing as an argument.
-
- This is only implemented for local repos and reuses potentially
- precomputed sets in outgoing. Returns a raw changegroup generator."""
- if not outgoing.missing:
- return None
- bundler = getbundler(version, repo, bundlecaps)
- return getsubsetraw(repo, outgoing, bundler, source)
-
-def getchangegroup(repo, source, outgoing, bundlecaps=None,
- version='01'):
- """Like getbundle, but taking a discovery.outgoing as an argument.
-
- This is only implemented for local repos and reuses potentially
- precomputed sets in outgoing."""
- if not outgoing.missing:
- return None
- bundler = getbundler(version, repo, bundlecaps)
- return getsubset(repo, outgoing, bundler, source)
-
-def getlocalchangegroup(repo, *args, **kwargs):
- repo.ui.deprecwarn('getlocalchangegroup is deprecated, use getchangegroup',
- '4.3')
- return getchangegroup(repo, *args, **kwargs)
-
-def changegroup(repo, basenodes, source):
- # to avoid a race we use changegroupsubset() (issue1320)
- return changegroupsubset(repo, basenodes, repo.heads(), source)
-
def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles):
revisions = 0
files = 0
@@ -983,7 +973,8 @@
fl = repo.file(f)
o = len(fl)
try:
- if not fl.addgroup(source, revmap, trp):
+ deltas = source.deltaiter()
+ if not fl.addgroup(deltas, revmap, trp):
raise error.Abort(_("received file revlog group is empty"))
except error.CensoredBaseError as e:
raise error.Abort(_("received delta base is censored: %s") % e)
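The new deltaiter() relies on the two-argument iter(callable, sentinel) form: deltachunk(chain) is called until it returns an empty dict, and the node of each yielded chunk is threaded back in as the next delta base. A self-contained sketch of the same control flow with a stand-in reader (not Mercurial's unpacker):

    def make_reader(chunks):
        it = iter(chunks)
        def read(prevnode):
            try:
                node, delta = next(it)
            except StopIteration:
                return {}                 # empty dict acts as the sentinel
            return (node, prevnode, delta)
        return read

    read = make_reader([('n1', b'd1'), ('n2', b'd2')])
    chain = None
    for chunkdata in iter(lambda: read(chain), {}):
        print(chunkdata)
        chain = chunkdata[0]              # thread the node back in as chain
    # ('n1', None, b'd1')
    # ('n2', 'n1', b'd2')
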
--- a/mercurial/changelog.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/changelog.py Sat Sep 30 07:52:48 2017 -0700
@@ -27,8 +27,9 @@
def _string_escape(text):
"""
- >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
- >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
+ >>> from .pycompat import bytechr as chr
+ >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
+ >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
>>> s
'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
>>> res = _string_escape(s)
@@ -41,12 +42,13 @@
def decodeextra(text):
"""
- >>> sorted(decodeextra(encodeextra({'foo': 'bar', 'baz': chr(0) + '2'})
- ... ).iteritems())
+ >>> from .pycompat import bytechr as chr
+ >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
+ ... ).items())
[('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
- >>> sorted(decodeextra(encodeextra({'foo': 'bar',
- ... 'baz': chr(92) + chr(0) + '2'})
- ... ).iteritems())
+ >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
+ ... b'baz': chr(92) + chr(0) + b'2'})
+ ... ).items())
[('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
"""
extra = _defaultextra.copy()
@@ -275,7 +277,7 @@
datafile = '00changelog.d'
revlog.revlog.__init__(self, opener, indexfile, datafile=datafile,
- checkambig=True)
+ checkambig=True, mmaplargeindex=True)
if self._initempty:
# changelogs don't benefit from generaldelta
--- a/mercurial/chgserver.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/chgserver.py Sat Sep 30 07:52:48 2017 -0700
@@ -565,8 +565,11 @@
self._hashstate, self._baseaddress)
def chgunixservice(ui, repo, opts):
- # CHGINTERNALMARK is temporarily set by chg client to detect if chg will
- # start another chg. drop it to avoid possible side effects.
+ # CHGINTERNALMARK is set by the chg client. It indicates that things were
+ # started by chg, so other code can act accordingly, e.g. by disabling
+ # demandimport or detecting a chg client started by another chg client. By
+ # the time we get here, CHGINTERNALMARK is no longer useful, so it is
+ # dropped to keep the environment clean.
if 'CHGINTERNALMARK' in encoding.environ:
del encoding.environ['CHGINTERNALMARK']
--- a/mercurial/cmdutil.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/cmdutil.py Sat Sep 30 07:52:48 2017 -0700
@@ -35,7 +35,6 @@
obsolete,
patch,
pathutil,
- phases,
pycompat,
registrar,
revlog,
@@ -123,6 +122,8 @@
_('ignore changes in the amount of white space')),
('B', 'ignore-blank-lines', None,
_('ignore changes whose lines are all blank')),
+ ('Z', 'ignore-space-at-eol', None,
+ _('ignore changes in whitespace at EOL')),
]
diffopts2 = [
@@ -277,7 +278,7 @@
# 1. filter patch, since we are intending to apply subset of it
try:
chunks, newopts = filterfn(ui, originalchunks)
- except patch.PatchError as err:
+ except error.PatchError as err:
raise error.Abort(_('error parsing patch: %s') % err)
opts.update(newopts)
@@ -339,7 +340,7 @@
+ crecordmod.patchhelptext
+ fp.read())
reviewedpatch = ui.edit(patchtext, "",
- extra={"suffix": ".diff"},
+ action="diff",
repopath=repo.path)
fp.truncate(0)
fp.write(reviewedpatch)
@@ -359,7 +360,7 @@
ui.debug('applying patch\n')
ui.debug(fp.getvalue())
patch.internalpatch(ui, repo, fp, 1, eolmode=None)
- except patch.PatchError as err:
+ except error.PatchError as err:
raise error.Abort(str(err))
del fp
@@ -467,12 +468,12 @@
return True
def isignoreddir(localpath):
- """
- This function checks whether the directory contains only ignored files
- and hence should the directory be considered ignored. Returns True, if
- that should be ignored otherwise False.
+ """Return True if `localpath` directory is ignored or contains only
+ ignored files and should hence be considered ignored.
"""
dirpath = os.path.join(root, localpath)
+ if ignorefn(dirpath):
+ return True
for f in os.listdir(dirpath):
filepath = os.path.join(dirpath, f)
if os.path.isdir(filepath):
@@ -535,7 +536,7 @@
rs = []
newls = []
- for par, files in pardict.iteritems():
+ for par, files in sorted(pardict.iteritems()):
lenpar = numfiles(par)
if lenpar == len(files):
newls.append(par)
@@ -573,6 +574,112 @@
return finalrs
+def _commentlines(raw):
+ '''Surround lines with a comment char and a new line'''
+ lines = raw.splitlines()
+ commentedlines = ['# %s' % line for line in lines]
+ return '\n'.join(commentedlines) + '\n'
+
+def _conflictsmsg(repo):
+ # avoid merge cycle
+ from . import merge as mergemod
+ mergestate = mergemod.mergestate.read(repo)
+ if not mergestate.active():
+ return
+
+ m = scmutil.match(repo[None])
+ unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
+ if unresolvedlist:
+ mergeliststr = '\n'.join(
+ [' %s' % os.path.relpath(
+ os.path.join(repo.root, path),
+ pycompat.getcwd()) for path in unresolvedlist])
+ msg = _('''Unresolved merge conflicts:
+
+%s
+
+To mark files as resolved: hg resolve --mark FILE''') % mergeliststr
+ else:
+ msg = _('No unresolved merge conflicts.')
+
+ return _commentlines(msg)
+
+def _helpmessage(continuecmd, abortcmd):
+ msg = _('To continue: %s\n'
+ 'To abort: %s') % (continuecmd, abortcmd)
+ return _commentlines(msg)
+
+def _rebasemsg():
+ return _helpmessage('hg rebase --continue', 'hg rebase --abort')
+
+def _histeditmsg():
+ return _helpmessage('hg histedit --continue', 'hg histedit --abort')
+
+def _unshelvemsg():
+ return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')
+
+def _updatecleanmsg(dest=None):
+ warning = _('warning: this will discard uncommitted changes')
+ return 'hg update --clean %s (%s)' % (dest or '.', warning)
+
+def _graftmsg():
+ # tweakdefaults requires `update` to have a rev hence the `.`
+ return _helpmessage('hg graft --continue', _updatecleanmsg())
+
+def _mergemsg():
+ # tweakdefaults requires `update` to have a rev hence the `.`
+ return _helpmessage('hg commit', _updatecleanmsg())
+
+def _bisectmsg():
+ msg = _('To mark the changeset good: hg bisect --good\n'
+ 'To mark the changeset bad: hg bisect --bad\n'
+ 'To abort: hg bisect --reset\n')
+ return _commentlines(msg)
+
+def fileexistspredicate(filename):
+ return lambda repo: repo.vfs.exists(filename)
+
+def _mergepredicate(repo):
+ return len(repo[None].parents()) > 1
+
+STATES = (
+ # (state, predicate to detect states, helpful message function)
+ ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
+ ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
+ ('graft', fileexistspredicate('graftstate'), _graftmsg),
+ ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
+ ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
+ # The merge state is part of a list that will be iterated over.
+ # They need to be last because some of the other unfinished states may also
+ # be in a merge or update state (eg. rebase, histedit, graft, etc).
+ # We want those to have priority.
+ ('merge', _mergepredicate, _mergemsg),
+)
+
+def _getrepostate(repo):
+ # experimental config: commands.status.skipstates
+ skip = set(repo.ui.configlist('commands', 'status.skipstates'))
+ for state, statedetectionpredicate, msgfn in STATES:
+ if state in skip:
+ continue
+ if statedetectionpredicate(repo):
+ return (state, statedetectionpredicate, msgfn)
+
+def morestatus(repo, fm):
+ statetuple = _getrepostate(repo)
+ label = 'status.morestatus'
+ if statetuple:
+ fm.startitem()
+ state, statedetectionpredicate, helpfulmsg = statetuple
+ statemsg = _('The repository is in an unfinished *%s* state.') % state
+ fm.write('statemsg', '%s\n', _commentlines(statemsg), label=label)
+ conmsg = _conflictsmsg(repo)
+ if conmsg:
+ fm.write('conflictsmsg', '%s\n', conmsg, label=label)
+ if helpfulmsg:
+ helpmsg = helpfulmsg()
+ fm.write('helpmsg', '%s\n', helpmsg, label=label)
+
def findpossible(cmd, table, strict=False):
"""
Return cmd -> (aliases, command table entry)
@@ -669,7 +776,7 @@
message = '\n'.join(util.readfile(logfile).splitlines())
except IOError as inst:
raise error.Abort(_("can't read commit message '%s': %s") %
- (logfile, inst.strerror))
+ (logfile, encoding.strtolocal(inst.strerror)))
return message
def mergeeditform(ctxorbool, baseformname):
@@ -991,7 +1098,7 @@
srcexists = False
else:
ui.warn(_('%s: cannot copy - %s\n') %
- (relsrc, inst.strerror))
+ (relsrc, encoding.strtolocal(inst.strerror)))
return True # report a failure
if ui.verbose or not exact:
@@ -1227,7 +1334,7 @@
try:
patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
files=files, eolmode=None, similarity=sim / 100.0)
- except patch.PatchError as e:
+ except error.PatchError as e:
if not partial:
raise error.Abort(str(e))
if partial:
@@ -1273,7 +1380,7 @@
try:
patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
files, eolmode=None)
- except patch.PatchError as e:
+ except error.PatchError as e:
raise error.Abort(str(e))
if opts.get('exact'):
editor = None
@@ -1465,10 +1572,10 @@
labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
if ctx.obsolete():
labels.append('changeset.obsolete')
- if ctx.troubled():
- labels.append('changeset.troubled')
- for trouble in ctx.troubles():
- labels.append('trouble.%s' % trouble)
+ if ctx.isunstable():
+ labels.append('changeset.unstable')
+ for instability in ctx.instabilities():
+ labels.append('instability.%s' % instability)
return ' '.join(labels)
class changeset_printer(object):
@@ -1516,22 +1623,16 @@
'''show a single changeset or file revision'''
changenode = ctx.node()
rev = ctx.rev()
- if self.ui.debugflag:
- hexfunc = hex
- else:
- hexfunc = short
- # as of now, wctx.node() and wctx.rev() return None, but we want to
- # show the same values as {node} and {rev} templatekw
- revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
if self.ui.quiet:
- self.ui.write("%d:%s\n" % revnode, label='log.node')
+ self.ui.write("%s\n" % scmutil.formatchangeid(ctx),
+ label='log.node')
return
date = util.datestr(ctx.date())
# i18n: column positioning for "hg log"
- self.ui.write(_("changeset: %d:%s\n") % revnode,
+ self.ui.write(_("changeset: %s\n") % scmutil.formatchangeid(ctx),
label=_changesetlabels(ctx))
# branches are shown first before any other names due to backwards
@@ -1560,16 +1661,15 @@
for pctx in scmutil.meaningfulparents(self.repo, ctx):
label = 'log.parent changeset.%s' % pctx.phasestr()
# i18n: column positioning for "hg log"
- self.ui.write(_("parent: %d:%s\n")
- % (pctx.rev(), hexfunc(pctx.node())),
+ self.ui.write(_("parent: %s\n") % scmutil.formatchangeid(pctx),
label=label)
if self.ui.debugflag and rev is not None:
mnode = ctx.manifestnode()
+ mrev = self.repo.manifestlog._revlog.rev(mnode)
# i18n: column positioning for "hg log"
- self.ui.write(_("manifest: %d:%s\n") %
- (self.repo.manifestlog._revlog.rev(mnode),
- hex(mnode)),
+ self.ui.write(_("manifest: %s\n")
+ % scmutil.formatrevnode(self.ui, mrev, mnode),
label='ui.debug log.manifest')
# i18n: column positioning for "hg log"
self.ui.write(_("user: %s\n") % ctx.user(),
@@ -1578,10 +1678,11 @@
self.ui.write(_("date: %s\n") % date,
label='log.date')
- if ctx.troubled():
+ if ctx.isunstable():
# i18n: column positioning for "hg log"
- self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
- label='log.trouble')
+ instabilities = ctx.instabilities()
+ self.ui.write(_("instability: %s\n") % ', '.join(instabilities),
+ label='log.instability')
self._exthook(ctx)
@@ -1893,19 +1994,19 @@
regular display via changeset_printer() is done.
"""
# options
- matchfn = None
+ match = None
if opts.get('patch') or opts.get('stat'):
- matchfn = scmutil.matchall(repo)
+ match = scmutil.matchall(repo)
if opts.get('template') == 'json':
- return jsonchangeset(ui, repo, matchfn, opts, buffered)
+ return jsonchangeset(ui, repo, match, opts, buffered)
spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
if not spec.ref and not spec.tmpl and not spec.mapfile:
- return changeset_printer(ui, repo, matchfn, opts, buffered)
-
- return changeset_templater(ui, repo, spec, matchfn, opts, buffered)
+ return changeset_printer(ui, repo, match, opts, buffered)
+
+ return changeset_templater(ui, repo, spec, match, opts, buffered)
def showmarker(fm, marker, index=None):
"""utility function to display obsolescence marker in a readable way
@@ -1913,7 +2014,7 @@
To be used by debug function."""
if index is not None:
fm.write('index', '%i ', index)
- fm.write('precnode', '%s ', hex(marker.precnode()))
+ fm.write('prednode', '%s ', hex(marker.prednode()))
succs = marker.succnodes()
fm.condwrite(succs, 'succnodes', '%s ',
fm.formatlist(map(hex, succs), name='node'))
@@ -2449,7 +2550,7 @@
if not (revs.isdescending() or revs.istopo()):
revs.sort(reverse=True)
if expr:
- matcher = revset.match(repo.ui, expr, order=revset.followorder)
+ matcher = revset.match(repo.ui, expr)
revs = matcher(repo, revs)
if limit is not None:
limitedrevs = []
@@ -2475,7 +2576,7 @@
return smartset.baseset([]), None, None
expr, filematcher = _makelogrevset(repo, pats, opts, revs)
if expr:
- matcher = revset.match(repo.ui, expr, order=revset.followorder)
+ matcher = revset.match(repo.ui, expr)
revs = matcher(repo, revs)
if limit is not None:
limitedrevs = []
@@ -2509,7 +2610,8 @@
return formatnode
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
- filematcher=None):
+ filematcher=None, props=None):
+ props = props or {}
formatnode = _graphnodeformatter(ui, displayer)
state = graphmod.asciistate()
styles = state['styles']
@@ -2546,14 +2648,18 @@
revmatchfn = None
if filematcher is not None:
revmatchfn = filematcher(ctx.rev())
- displayer.show(ctx, copies=copies, matchfn=revmatchfn)
+ edges = edgefn(type, char, state, rev, parents)
+ firstedge = next(edges)
+ width = firstedge[2]
+ displayer.show(ctx, copies=copies, matchfn=revmatchfn,
+ _graphwidth=width, **props)
lines = displayer.hunk.pop(rev).split('\n')
if not lines[-1]:
del lines[-1]
displayer.flush(ctx)
- edges = edgefn(type, char, lines, state, rev, parents)
- for type, char, lines, coldata in edges:
+ for type, char, width, coldata in itertools.chain([firstedge], edges):
graphmod.ascii(ui, state, type, char, lines, coldata)
+ lines = []
displayer.close()
def graphlog(ui, repo, pats, opts):
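The displaygraph() change above consumes the first edge from edgefn() to learn the graph width before calling displayer.show(), then replays that edge with itertools.chain(). A small standalone sketch of the peek-then-replay pattern (the edge tuples here are made up)::

  import itertools

  def edges():
      yield ('edge', '|', 2, 'coldata-a')
      yield ('edge', '|', 3, 'coldata-b')

  gen = edges()
  firstedge = next(gen)      # peek at the first item to read its width
  width = firstedge[2]
  print('graph width:', width)
  for edge in itertools.chain([firstedge], gen):   # replay it, then continue
      print(edge)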
@@ -2602,8 +2708,8 @@
dirstate = repo.dirstate
# We don't want to just call wctx.walk here, since it would return a lot of
# clean files, which we aren't interested in and takes time.
- for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
- True, False, full=False)):
+ for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
+ unknown=True, ignored=False, full=False)):
exact = match.exact(f)
if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
if cca:
@@ -2892,20 +2998,15 @@
dsguard = None
# extract addremove carefully -- this function can be called from a command
# that doesn't support addremove
- try:
- if opts.get('addremove'):
- dsguard = dirstateguard.dirstateguard(repo, 'commit')
+ if opts.get('addremove'):
+ dsguard = dirstateguard.dirstateguard(repo, 'commit')
+ with dsguard or util.nullcontextmanager():
+ if dsguard:
if scmutil.addremove(repo, matcher, "", opts) != 0:
raise error.Abort(
_("failed to mark all new/missing files as added/removed"))
- r = commitfunc(ui, repo, message, matcher, opts)
- if dsguard:
- dsguard.close()
- return r
- finally:
- if dsguard:
- dsguard.release()
+ return commitfunc(ui, repo, message, matcher, opts)
def samefile(f, ctx1, ctx2):
if f in ctx1.manifest():
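The commit() refactor above drops the try/finally around the optional dirstate guard in favour of "with dsguard or util.nullcontextmanager():". A minimal sketch of that pattern with a stand-in guard class (the real dirstateguard does much more)::

  import contextlib

  @contextlib.contextmanager
  def nullcontextmanager():
      # do-nothing fallback when no guard is needed
      yield

  class fakedirstateguard(object):
      def __enter__(self):
          print('guard acquired')
          return self
      def __exit__(self, *exc):
          print('guard released')

  def commit(addremove):
      dsguard = fakedirstateguard() if addremove else None
      with dsguard or nullcontextmanager():
          print('committing (addremove=%s)' % addremove)

  commit(addremove=True)
  commit(addremove=False)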
@@ -2919,7 +3020,7 @@
else:
return f not in ctx2.manifest()
-def amend(ui, repo, commitfunc, old, extra, pats, opts):
+def amend(ui, repo, old, extra, pats, opts):
# avoid cycle context -> subrepo -> cmdutil
from . import context
@@ -2931,44 +3032,29 @@
ui.note(_('amending changeset %s\n') % old)
base = old.p1()
- newid = None
with repo.wlock(), repo.lock(), repo.transaction('amend'):
- # See if we got a message from -m or -l, if not, open the editor
- # with the message of the changeset to amend
- message = logmessage(ui, opts)
- # ensure logfile does not conflict with later enforcement of the
- # message. potential logfile content has been processed by
- # `logmessage` anyway.
- opts.pop('logfile')
- # First, do a regular commit to record all changes in the working
- # directory (if there are any)
- ui.callhooks = False
- activebookmark = repo._bookmarks.active
- try:
- repo._bookmarks.active = None
- opts['message'] = 'temporary amend commit for %s' % old
- node = commit(ui, repo, commitfunc, pats, opts)
- finally:
- repo._bookmarks.active = activebookmark
- ui.callhooks = True
- ctx = repo[node]
-
# Participating changesets:
#
- # node/ctx o - new (intermediate) commit that contains changes
- # | from working dir to go into amending commit
- # | (or a workingctx if there were no changes)
+ # wctx o - workingctx that contains changes from working copy
+ # | to go into amending commit
# |
# old o - changeset to amend
# |
- # base o - parent of amending changeset
+ # base o - first parent of the changeset to amend
+ wctx = repo[None]
# Update extra dict from amended commit (e.g. to preserve graft
# source)
extra.update(old.extra())
- # Also update it from the intermediate commit or from the wctx
- extra.update(ctx.extra())
+ # Also update it from the wctx
+ extra.update(wctx.extra())
+
+ user = opts.get('user') or old.user()
+ date = opts.get('date') or old.date()
+
+ # Parse the date to allow comparison between date and old.date()
+ date = util.parsedate(date)
if len(old.parents()) > 1:
# ctx.files() isn't reliable for merges, so fall back to the
@@ -2978,30 +3064,47 @@
else:
files = set(old.files())
- # Second, we use either the commit we just did, or if there were no
- # changes the parent of the working directory as the version of the
- # files in the final amend commit
- if node:
- ui.note(_('copying changeset %s to %s\n') % (ctx, base))
-
- user = ctx.user()
- date = ctx.date()
+ # add/remove the files to the working copy if the "addremove" option
+ # was specified.
+ matcher = scmutil.match(wctx, pats, opts)
+ if (opts.get('addremove')
+ and scmutil.addremove(repo, matcher, "", opts)):
+ raise error.Abort(
+ _("failed to mark all new/missing files as added/removed"))
+
+ filestoamend = set(f for f in wctx.files() if matcher(f))
+
+ changes = (len(filestoamend) > 0)
+ if changes:
# Recompute copies (avoid recording a -> b -> a)
- copied = copies.pathcopies(base, ctx)
+ copied = copies.pathcopies(base, wctx, matcher)
if old.p2:
- copied.update(copies.pathcopies(old.p2(), ctx))
+ copied.update(copies.pathcopies(old.p2(), wctx, matcher))
# Prune files which were reverted by the updates: if old
- # introduced file X and our intermediate commit, node,
- # renamed that file, then those two files are the same and
+ # introduced file X and the file was renamed in the working
+ # copy, then those two files are the same and
# we can discard X from our list of files. Likewise if X
# was deleted, it's no longer relevant
- files.update(ctx.files())
- files = [f for f in files if not samefile(f, ctx, base)]
+ files.update(filestoamend)
+ files = [f for f in files if not samefile(f, wctx, base)]
def filectxfn(repo, ctx_, path):
try:
- fctx = ctx[path]
+ # If the file being considered is not amongst the files
+ # to be amended, we should return the file context from the
+ # old changeset. This avoids issues when only some files in
+ # the working copy are being amended but there are also
+ # changes to other files from the old changeset.
+ if path not in filestoamend:
+ return old.filectx(path)
+
+ fctx = wctx[path]
+
+ # Return None for removed files.
+ if not fctx.exists():
+ return None
+
flags = fctx.flags()
mctx = context.memfilectx(repo,
fctx.path(), fctx.data(),
@@ -3021,11 +3124,14 @@
except KeyError:
return None
- user = opts.get('user') or old.user()
- date = opts.get('date') or old.date()
+ # See if we got a message from -m or -l; if not, open the editor with
+ # the message of the changeset to amend.
+ message = logmessage(ui, opts)
+
editform = mergeeditform(old, 'commit.amend')
editor = getcommiteditor(editform=editform,
**pycompat.strkwargs(opts))
+
if not message:
editor = getcommiteditor(edit=True, editform=editform)
message = old.description()
@@ -3044,7 +3150,7 @@
editor=editor)
newdesc = changelog.stripdesc(new.description())
- if ((not node)
+ if ((not changes)
and newdesc == old.description()
and user == old.user()
and date == old.date()
@@ -3055,23 +3161,38 @@
# This is not what we expect from amend.
return old.node()
- ph = repo.ui.config('phases', 'new-commit', phases.draft)
- try:
- if opts.get('secret'):
- commitphase = 'secret'
- else:
- commitphase = old.phase()
- repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
+ if opts.get('secret'):
+ commitphase = 'secret'
+ else:
+ commitphase = old.phase()
+ overrides = {('phases', 'new-commit'): commitphase}
+ with ui.configoverride(overrides, 'amend'):
newid = repo.commitctx(new)
- finally:
- repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
- if newid != old.node():
- # Reroute the working copy parent to the new changeset
- repo.setparents(newid, nullid)
- mapping = {old.node(): (newid,)}
- if node:
- mapping[node] = ()
- scmutil.cleanupnodes(repo, mapping, 'amend')
+
+ # Reroute the working copy parent to the new changeset
+ repo.setparents(newid, nullid)
+ mapping = {old.node(): (newid,)}
+ scmutil.cleanupnodes(repo, mapping, 'amend')
+
+ # Fix the dirstate, because localrepo.commitctx does not update it.
+ # This is rather convenient: we do not need to update the dirstate
+ # for all the files in the new commit, which commitctx would have
+ # done had it updated the dirstate. Instead, we can selectively
+ # update the dirstate only for the amended files.
+ dirstate = repo.dirstate
+
+ # Update the state of the files which were added and
+ # modified in the amend to "normal" in the dirstate.
+ normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
+ for f in normalfiles:
+ dirstate.normal(f)
+
+ # Update the state of files which were removed in the amend
+ # to "removed" in the dirstate.
+ removedfiles = set(wctx.removed()) & filestoamend
+ for f in removedfiles:
+ dirstate.drop(f)
+
return newid
def commiteditor(repo, ctx, subs, editform=''):
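The amend rewrite above also replaces the setconfig/try/finally pair for 'phases.new-commit' with ui.configoverride(), which scopes a temporary config value to a with block. A standalone sketch of the same idea against a toy ui object (not Mercurial's real ui class)::

  import contextlib

  class toyui(object):
      def __init__(self):
          self._config = {('phases', 'new-commit'): 'draft'}

      def config(self, section, name):
          return self._config.get((section, name))

      @contextlib.contextmanager
      def configoverride(self, overrides, source=''):
          saved = {k: self._config.get(k) for k in overrides}
          self._config.update(overrides)
          try:
              yield
          finally:
              self._config.update(saved)

  ui = toyui()
  with ui.configoverride({('phases', 'new-commit'): 'secret'}, 'amend'):
      assert ui.config('phases', 'new-commit') == 'secret'
  assert ui.config('phases', 'new-commit') == 'draft'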
@@ -3109,7 +3230,7 @@
editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
editform=editform, pending=pending,
- repopath=repo.path)
+ repopath=repo.path, action='commit')
text = editortext
# strip away anything below this special string (used for editors that want
@@ -3601,7 +3722,7 @@
if reversehunks:
chunks = patch.reversehunks(chunks)
- except patch.PatchError as err:
+ except error.PatchError as err:
raise error.Abort(_('error parsing patch: %s') % err)
newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
@@ -3623,7 +3744,7 @@
if dopatch:
try:
patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
- except patch.PatchError as err:
+ except error.PatchError as err:
raise error.Abort(str(err))
del fp
else:
--- a/mercurial/color.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/color.py Sat Sep 30 07:52:48 2017 -0700
@@ -130,7 +130,7 @@
def loadcolortable(ui, extname, colortable):
_defaultstyles.update(colortable)
-def _terminfosetup(ui, mode):
+def _terminfosetup(ui, mode, formatted):
'''Initialize terminfo data and the terminal if we're in terminfo mode.'''
# If we failed to load curses, we go ahead and return.
@@ -164,8 +164,8 @@
del ui._terminfoparams[key]
if not curses.tigetstr('setaf') or not curses.tigetstr('setab'):
# Only warn about missing terminfo entries if we explicitly asked for
- # terminfo mode.
- if mode == "terminfo":
+ # terminfo mode and we're in a formatted terminal.
+ if mode == "terminfo" and formatted:
ui.warn(_("no terminfo entry for setab/setaf: reverting to "
"ECMA-48 color\n"))
ui._terminfoparams.clear()
@@ -242,7 +242,7 @@
def modewarn():
# only warn if color.mode was explicitly set and we're in
# a formatted terminal
- if mode == realmode and ui.formatted():
+ if mode == realmode and formatted:
ui.warn(_('warning: failed to set color mode to %s\n') % mode)
if realmode == 'win32':
@@ -253,7 +253,7 @@
elif realmode == 'ansi':
ui._terminfoparams.clear()
elif realmode == 'terminfo':
- _terminfosetup(ui, mode)
+ _terminfosetup(ui, mode, formatted)
if not ui._terminfoparams:
## FIXME Shouldn't we return None in this case too?
modewarn()
@@ -320,10 +320,10 @@
def _mergeeffects(text, start, stop):
"""Insert start sequence at every occurrence of stop sequence
- >>> s = _mergeeffects('cyan', '[C]', '|')
- >>> s = _mergeeffects(s + 'yellow', '[Y]', '|')
- >>> s = _mergeeffects('ma' + s + 'genta', '[M]', '|')
- >>> s = _mergeeffects('red' + s, '[R]', '|')
+ >>> s = _mergeeffects(b'cyan', b'[C]', b'|')
+ >>> s = _mergeeffects(s + b'yellow', b'[Y]', b'|')
+ >>> s = _mergeeffects(b'ma' + s + b'genta', b'[M]', b'|')
+ >>> s = _mergeeffects(b'red' + s, b'[R]', b'|')
>>> s
'[R]red[M]ma[Y][C]cyan|[R][M][Y]yellow|[R][M]genta|'
"""
--- a/mercurial/commands.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/commands.py Sat Sep 30 07:52:48 2017 -0700
@@ -477,9 +477,9 @@
prefix = os.path.basename(repo.root) + '-%h'
prefix = cmdutil.makefilename(repo, prefix, node)
- matchfn = scmutil.match(ctx, [], opts)
+ match = scmutil.match(ctx, [], opts)
archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
- matchfn, prefix, subrepos=opts.get('subrepos'))
+ match, prefix, subrepos=opts.get('subrepos'))
@command('backout',
[('', 'merge', None, _('merge with old dirstate parent after backout')),
@@ -917,6 +917,9 @@
diverged, a new 'divergent bookmark' of the form 'name@path' will
be created. Using :hg:`merge` will resolve the divergence.
+ Specifying a bookmark as '.' to the -m or -d option is equivalent to
+ specifying the active bookmark's name.
+
A bookmark named '@' has the special property that :hg:`clone` will
check it out by default if it exists.
@@ -962,12 +965,14 @@
if delete or rename or names or inactive:
with repo.wlock(), repo.lock(), repo.transaction('bookmark') as tr:
if delete:
+ names = pycompat.maplist(repo._bookmarks.expandname, names)
bookmarks.delete(repo, tr, names)
elif rename:
if not names:
raise error.Abort(_("new bookmark name required"))
elif len(names) > 1:
raise error.Abort(_("only one new bookmark name allowed"))
+ rename = repo._bookmarks.expandname(rename)
bookmarks.rename(repo, tr, rename, names[0], force, inactive)
elif names:
bookmarks.addbookmarks(repo, tr, names, rev, force, inactive)
@@ -1072,7 +1077,10 @@
allheads = set(repo.heads())
branches = []
for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
- isactive = not isclosed and bool(set(heads) & allheads)
+ isactive = False
+ if not isclosed:
+ openheads = set(repo.branchmap().iteropen(heads))
+ isactive = bool(openheads & allheads)
branches.append((tag, repo[tip], isactive, not isclosed))
branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
reverse=True)
@@ -1227,7 +1235,7 @@
contentopts = {'cg.version': cgversion}
- if repo.ui.configbool('experimental', 'evolution.bundle-obsmarker'):
+ if repo.ui.configbool('experimental', 'stabilization.bundle-obsmarker'):
contentopts['obsolescence'] = True
if repo.ui.configbool('experimental', 'bundle-phases'):
contentopts['phases'] = True
@@ -1542,15 +1550,7 @@
if not obsolete.isenabled(repo, obsolete.createmarkersopt):
cmdutil.checkunfinished(repo)
- # commitfunc is used only for temporary amend commit by cmdutil.amend
- def commitfunc(ui, repo, message, match, opts):
- return repo.commit(message,
- opts.get('user') or old.user(),
- opts.get('date') or old.date(),
- match,
- extra=extra)
-
- node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
+ node = cmdutil.amend(ui, repo, old, extra, pats, opts)
if node == old.node():
ui.status(_("nothing changed\n"))
return 1
@@ -1644,8 +1644,8 @@
samplehgrc = uimod.samplehgrcs['user']
f = paths[0]
- fp = open(f, "w")
- fp.write(samplehgrc)
+ fp = open(f, "wb")
+ fp.write(util.tonativeeol(samplehgrc))
fp.close()
editor = ui.geteditor()
@@ -2150,7 +2150,7 @@
skipped = set()
# check for merges
for rev in repo.revs('%ld and merge()', revs):
- ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
+ ui.warn(_('skipping ungraftable merge revision %d\n') % rev)
skipped.add(rev)
revs = [r for r in revs if r not in skipped]
if not revs:
@@ -2481,7 +2481,7 @@
skip = {}
revfiles = {}
- matchfn = scmutil.match(repo[None], pats, opts)
+ match = scmutil.match(repo[None], pats, opts)
found = False
follow = opts.get('follow')
@@ -2522,7 +2522,7 @@
ui.pager('grep')
fm = ui.formatter('grep', opts)
- for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
+ for ctx in cmdutil.walkchangerevs(repo, match, opts, prep):
rev = ctx.rev()
parent = ctx.p1().rev()
for fn in sorted(revfiles.get(rev, [])):
@@ -3844,7 +3844,7 @@
"merge)\n"))
else:
ui.status(_("(run 'hg heads' to see heads)\n"))
- else:
+ elif not ui.configbool('commands', 'update.requiredest'):
ui.status(_("(run 'hg update' to get a working copy)\n"))
@command('^pull',
@@ -3972,6 +3972,7 @@
('b', 'branch', [],
_('a specific branch you would like to push'), _('BRANCH')),
('', 'new-branch', False, _('allow pushing a new branch')),
+ ('', 'pushvars', [], _('variables that can be sent to server (ADVANCED)')),
] + remoteopts,
_('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
def push(ui, repo, dest=None, **opts):
@@ -4009,6 +4010,25 @@
Please see :hg:`help urls` for important details about ``ssh://``
URLs. If DESTINATION is omitted, a default path will be used.
+ .. container:: verbose
+
+ The --pushvars option sends strings to the server that become
+ environment variables prepended with ``HG_USERVAR_``. For example,
+ ``--pushvars ENABLE_FEATURE=true`` provides the server-side hooks with
+ ``HG_USERVAR_ENABLE_FEATURE=true`` as part of their environment.
+
+ pushvars can be used to provide user-overridable hooks as well as to set
+ debug levels. One example is a hook that blocks commits containing
+ conflict markers, but lets the user override it when the file uses
+ conflict markers for testing purposes or its format contains strings
+ that look like conflict markers.
+
+ By default, servers will ignore `--pushvars`. To enable it, add the
+ following to your configuration file::
+
+ [push]
+ pushvars.server = true
+
Returns 0 if push was successful, 1 if nothing to push.
"""
@@ -4061,10 +4081,14 @@
return not result
finally:
del repo._subtoppath
+
+ opargs = dict(opts.get('opargs', {})) # copy opargs since we may mutate it
+ opargs.setdefault('pushvars', []).extend(opts.get('pushvars', []))
+
pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
newbranch=opts.get('new_branch'),
bookmarks=opts.get('bookmark', ()),
- opargs=opts.get('opargs'))
+ opargs=opargs)
result = not pushop.cgresult
@@ -4675,6 +4699,19 @@
files are not considered while tersing until 'i' is there in --terse value
or the --ignored option is used.
+ The --verbose option shows more context about the state of the repository,
+ such as an unfinished merge, shelve, or rebase. You can have this behaviour
+ turned on by default with the following config::
+
+ [commands]
+ status.verbose = true
+
+ You can also skip some states, such as bisect, by adding the following to
+ the configuration file::
+
+ [commands]
+ status.skipstates = bisect
+
Examples:
- show changes in the working directory relative to a
@@ -4764,6 +4801,10 @@
if f in copy:
fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
label='status.copied')
+
+ if ((ui.verbose or ui.configbool('commands', 'status.verbose'))
+ and not ui.plain()):
+ cmdutil.morestatus(repo, fm)
fm.end()
@command('^summary|sum',
@@ -4814,10 +4855,11 @@
ui.write(_(' (no revision checked out)'))
if p.obsolete():
ui.write(_(' (obsolete)'))
- if p.troubled():
+ if p.isunstable():
+ instabilities = (ui.label(instability, 'trouble.%s' % instability)
+ for instability in p.instabilities())
ui.write(' ('
- + ', '.join(ui.label(trouble, 'trouble.%s' % trouble)
- for trouble in p.troubles())
+ + ', '.join(instabilities)
+ ')')
ui.write('\n')
if p.description():
@@ -4939,13 +4981,13 @@
ui.status(_('phases: %s\n') % ', '.join(t))
if obsolete.isenabled(repo, obsolete.createmarkersopt):
- for trouble in ("unstable", "divergent", "bumped"):
+ for trouble in ("orphan", "contentdivergent", "phasedivergent"):
numtrouble = len(repo.revs(trouble + "()"))
# We write all the possibilities to ease translation
troublemsg = {
- "unstable": _("unstable: %d changesets"),
- "divergent": _("divergent: %d changesets"),
- "bumped": _("bumped: %d changesets"),
+ "orphan": _("orphan: %d changesets"),
+ "contentdivergent": _("content-divergent: %d changesets"),
+ "phasedivergent": _("phase-divergent: %d changesets"),
}
if numtrouble > 0:
ui.status(troublemsg[trouble] % numtrouble + "\n")
--- a/mercurial/compat.h Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/compat.h Sat Sep 30 07:52:48 2017 -0700
@@ -7,8 +7,10 @@
#define inline __inline
#if defined(_WIN64)
typedef __int64 ssize_t;
+typedef unsigned __int64 uintptr_t;
#else
typedef int ssize_t;
+typedef unsigned int uintptr_t;
#endif
typedef signed char int8_t;
typedef short int16_t;
--- a/mercurial/config.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/config.py Sat Sep 30 07:52:48 2017 -0700
@@ -20,13 +20,14 @@
class config(object):
def __init__(self, data=None, includepaths=None):
self._data = {}
- self._source = {}
self._unset = []
self._includepaths = includepaths or []
if data:
for k in data._data:
self._data[k] = data[k].copy()
self._source = data._source.copy()
+ else:
+ self._source = util.cowdict()
def copy(self):
return config(self)
def __contains__(self, section):
@@ -39,13 +40,19 @@
for d in self.sections():
yield d
def update(self, src):
+ self._source = self._source.preparewrite()
for s, n in src._unset:
- if s in self and n in self._data[s]:
+ ds = self._data.get(s, None)
+ if ds is not None and n in ds:
+ self._data[s] = ds.preparewrite()
del self._data[s][n]
del self._source[(s, n)]
for s in src:
- if s not in self:
- self._data[s] = util.sortdict()
+ ds = self._data.get(s, None)
+ if ds:
+ self._data[s] = ds.preparewrite()
+ else:
+ self._data[s] = util.cowsortdict()
self._data[s].update(src._data[s])
self._source.update(src._source)
def get(self, section, item, default=None):
@@ -74,16 +81,21 @@
assert not isinstance(value, str), (
'config values may not be unicode strings on Python 3')
if section not in self:
- self._data[section] = util.sortdict()
+ self._data[section] = util.cowsortdict()
+ else:
+ self._data[section] = self._data[section].preparewrite()
self._data[section][item] = value
if source:
+ self._source = self._source.preparewrite()
self._source[(section, item)] = source
def restore(self, data):
"""restore data returned by self.backup"""
+ self._source = self._source.preparewrite()
if len(data) == 4:
# restore old data
section, item, value, source = data
+ self._data[section] = self._data[section].preparewrite()
self._data[section][item] = value
self._source[(section, item)] = source
else:
@@ -149,7 +161,7 @@
if remap:
section = remap.get(section, section)
if section not in self:
- self._data[section] = util.sortdict()
+ self._data[section] = util.cowsortdict()
continue
m = itemre.match(l)
if m:
@@ -183,7 +195,7 @@
def parselist(value):
"""parse a configuration value as a list of comma/space separated strings
- >>> parselist('this,is "a small" ,test')
+ >>> parselist(b'this,is "a small" ,test')
['this', 'is', 'a small', 'test']
"""
--- a/mercurial/configitems.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/configitems.py Sat Sep 30 07:52:48 2017 -0700
@@ -10,6 +10,7 @@
import functools
from . import (
+ encoding,
error,
)
@@ -97,6 +98,12 @@
coreconfigitem('commands', 'status.relative',
default=False,
)
+coreconfigitem('commands', 'status.skipstates',
+ default=[],
+)
+coreconfigitem('commands', 'status.verbose',
+ default=False,
+)
coreconfigitem('commands', 'update.requiredest',
default=False,
)
@@ -166,23 +173,29 @@
coreconfigitem('experimental', 'clientcompressionengines',
default=list,
)
+coreconfigitem('experimental', 'copytrace',
+ default='on',
+)
+coreconfigitem('experimental', 'copytrace.sourcecommitlimit',
+ default=100,
+)
coreconfigitem('experimental', 'crecordtest',
default=None,
)
-coreconfigitem('experimental', 'disablecopytrace',
- default=False,
-)
coreconfigitem('experimental', 'editortmpinhg',
default=False,
)
-coreconfigitem('experimental', 'evolution',
+coreconfigitem('experimental', 'stabilization',
default=list,
+ alias=[('experimental', 'evolution')],
)
-coreconfigitem('experimental', 'evolution.bundle-obsmarker',
+coreconfigitem('experimental', 'stabilization.bundle-obsmarker',
default=False,
+ alias=[('experimental', 'evolution.bundle-obsmarker')],
)
-coreconfigitem('experimental', 'evolution.track-operation',
- default=False,
+coreconfigitem('experimental', 'stabilization.track-operation',
+ default=True,
+ alias=[('experimental', 'evolution.track-operation')]
)
coreconfigitem('experimental', 'exportableenviron',
default=list,
@@ -214,6 +227,9 @@
coreconfigitem('experimental', 'obsmarkers-exchange-debug',
default=False,
)
+coreconfigitem('experimental', 'rebase.multidest',
+ default=False,
+)
coreconfigitem('experimental', 'revertalternateinteractivemode',
default=True,
)
@@ -343,8 +359,8 @@
coreconfigitem('progress', 'disable',
default=False,
)
-coreconfigitem('progress', 'estimate',
- default=2,
+coreconfigitem('progress', 'estimateinterval',
+ default=60.0,
)
coreconfigitem('progress', 'refresh',
default=0.1,
@@ -352,6 +368,9 @@
coreconfigitem('progress', 'width',
default=dynamicdefault,
)
+coreconfigitem('push', 'pushvars.server',
+ default=False,
+)
coreconfigitem('server', 'bundle1',
default=True,
)
@@ -567,6 +586,69 @@
coreconfigitem('verify', 'skipflags',
default=None,
)
+coreconfigitem('web', 'accesslog',
+ default='-',
+)
+coreconfigitem('web', 'address',
+ default='',
+)
+coreconfigitem('web', 'allow_archive',
+ default=list,
+)
+coreconfigitem('web', 'allow_read',
+ default=list,
+)
+coreconfigitem('web', 'baseurl',
+ default=None,
+)
+coreconfigitem('web', 'cacerts',
+ default=None,
+)
+coreconfigitem('web', 'certificate',
+ default=None,
+)
+coreconfigitem('web', 'collapse',
+ default=False,
+)
+coreconfigitem('web', 'csp',
+ default=None,
+)
+coreconfigitem('web', 'deny_read',
+ default=list,
+)
+coreconfigitem('web', 'descend',
+ default=True,
+)
+coreconfigitem('web', 'description',
+ default="",
+)
+coreconfigitem('web', 'encoding',
+ default=lambda: encoding.encoding,
+)
+coreconfigitem('web', 'errorlog',
+ default='-',
+)
+coreconfigitem('web', 'ipv6',
+ default=False,
+)
+coreconfigitem('web', 'port',
+ default=8000,
+)
+coreconfigitem('web', 'prefix',
+ default='',
+)
+coreconfigitem('web', 'refreshinterval',
+ default=20,
+)
+coreconfigitem('web', 'stripes',
+ default=1,
+)
+coreconfigitem('web', 'style',
+ default='paper',
+)
+coreconfigitem('web', 'templates',
+ default=None,
+)
coreconfigitem('worker', 'backgroundclose',
default=dynamicdefault,
)
--- a/mercurial/context.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/context.py Sat Sep 30 07:52:48 2017 -0700
@@ -103,12 +103,10 @@
return self.manifest()
def _matchstatus(self, other, match):
- """return match.always if match is none
-
- This internal method provides a way for child objects to override the
+ """This internal method provides a way for child objects to override the
match operator.
"""
- return match or matchmod.always(self._repo.root, self._repo.getcwd())
+ return match
def _buildstatus(self, other, s, match, listignored, listclean,
listunknown):
@@ -204,44 +202,85 @@
return self.rev() in obsmod.getrevs(self._repo, 'extinct')
def unstable(self):
+ msg = ("'context.unstable' is deprecated, "
+ "use 'context.orphan'")
+ self._repo.ui.deprecwarn(msg, '4.4')
+ return self.orphan()
+
+ def orphan(self):
"""True if the changeset is not obsolete but it's ancestor are"""
- return self.rev() in obsmod.getrevs(self._repo, 'unstable')
+ return self.rev() in obsmod.getrevs(self._repo, 'orphan')
def bumped(self):
+ msg = ("'context.bumped' is deprecated, "
+ "use 'context.phasedivergent'")
+ self._repo.ui.deprecwarn(msg, '4.4')
+ return self.phasedivergent()
+
+ def phasedivergent(self):
"""True if the changeset try to be a successor of a public changeset
Only non-public and non-obsolete changesets may be bumped.
"""
- return self.rev() in obsmod.getrevs(self._repo, 'bumped')
+ return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')
def divergent(self):
+ msg = ("'context.divergent' is deprecated, "
+ "use 'context.contentdivergent'")
+ self._repo.ui.deprecwarn(msg, '4.4')
+ return self.contentdivergent()
+
+ def contentdivergent(self):
"""Is a successors of a changeset with multiple possible successors set
Only non-public and non-obsolete changesets may be divergent.
"""
- return self.rev() in obsmod.getrevs(self._repo, 'divergent')
+ return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')
def troubled(self):
+ msg = ("'context.troubled' is deprecated, "
+ "use 'context.isunstable'")
+ self._repo.ui.deprecwarn(msg, '4.4')
+ return self.isunstable()
+
+ def isunstable(self):
"""True if the changeset is either unstable, bumped or divergent"""
- return self.unstable() or self.bumped() or self.divergent()
+ return self.orphan() or self.phasedivergent() or self.contentdivergent()
def troubles(self):
- """return the list of troubles affecting this changesets.
-
- Troubles are returned as strings. possible values are:
- - unstable,
- - bumped,
- - divergent.
+ """Keep the old version around in order to avoid breaking extensions
+ about different return values.
"""
+ msg = ("'context.troubles' is deprecated, "
+ "use 'context.instabilities'")
+ self._repo.ui.deprecwarn(msg, '4.4')
+
troubles = []
- if self.unstable():
- troubles.append('unstable')
- if self.bumped():
+ if self.orphan():
+ troubles.append('orphan')
+ if self.phasedivergent():
troubles.append('bumped')
- if self.divergent():
+ if self.contentdivergent():
troubles.append('divergent')
return troubles
+ def instabilities(self):
+ """return the list of instabilities affecting this changeset.
+
+ Instabilities are returned as strings. possible values are:
+ - orphan,
+ - phase-divergent,
+ - content-divergent.
+ """
+ instabilities = []
+ if self.orphan():
+ instabilities.append('orphan')
+ if self.phasedivergent():
+ instabilities.append('phase-divergent')
+ if self.contentdivergent():
+ instabilities.append('content-divergent')
+ return instabilities
+
def parents(self):
"""return contexts for each parent changeset"""
return self._parents
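The context.py hunks above rename the obsolescence queries (unstable/bumped/divergent/troubled become orphan/phasedivergent/contentdivergent/isunstable) while keeping the old names as thin shims that warn and delegate. A generic sketch of that shim pattern outside Mercurial, using the stdlib warnings module in place of ui.deprecwarn::

  import warnings

  class changectxstub(object):
      """Illustration only; not Mercurial's changectx."""
      def orphan(self):
          return False

      def unstable(self):
          # old name kept so third-party callers keep working for a release
          warnings.warn("'unstable' is deprecated, use 'orphan'",
                        DeprecationWarning, stacklevel=2)
          return self.orphan()

  assert changectxstub().unstable() is False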
@@ -351,6 +390,7 @@
reversed = True
ctx1, ctx2 = ctx2, ctx1
+ match = match or matchmod.always(self._repo.root, self._repo.getcwd())
match = ctx2._matchstatus(ctx1, match)
r = scmutil.status([], [], [], [], [], [], [])
r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
@@ -1056,6 +1096,13 @@
c = visit.pop(max(visit))
yield c
+ def decodeddata(self):
+ """Returns `data()` after running repository decoding filters.
+
+ This is often equivalent to how the data would be expressed on disk.
+ """
+ return self._repo.wwritedata(self.path(), self.data())
+
def _annotatepair(parents, childfctx, child, skipchild, diffopts):
r'''
Given parent and child fctxes and annotate data for parents, for all lines
@@ -1065,16 +1112,16 @@
Additionally, if `skipchild` is True, replace all other lines with parent
annotate data as well such that child is never blamed for any lines.
- >>> oldfctx = 'old'
- >>> p1fctx, p2fctx, childfctx = 'p1', 'p2', 'c'
- >>> olddata = 'a\nb\n'
- >>> p1data = 'a\nb\nc\n'
- >>> p2data = 'a\nc\nd\n'
- >>> childdata = 'a\nb2\nc\nc2\nd\n'
+ >>> oldfctx = b'old'
+ >>> p1fctx, p2fctx, childfctx = b'p1', b'p2', b'c'
+ >>> olddata = b'a\nb\n'
+ >>> p1data = b'a\nb\nc\n'
+ >>> p2data = b'a\nc\nd\n'
+ >>> childdata = b'a\nb2\nc\nc2\nd\n'
>>> diffopts = mdiff.diffopts()
>>> def decorate(text, rev):
- ... return ([(rev, i) for i in xrange(1, text.count('\n') + 1)], text)
+ ... return ([(rev, i) for i in xrange(1, text.count(b'\n') + 1)], text)
Basic usage:
@@ -1431,8 +1478,9 @@
def walk(self, match):
'''Generates matching file names.'''
- return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
- True, False))
+ return sorted(self._repo.dirstate.walk(match,
+ subrepos=sorted(self.substate),
+ unknown=True, ignored=False))
def matches(self, match):
return sorted(self._repo.dirstate.matches(match))
@@ -1614,6 +1662,9 @@
listsubrepos=listsubrepos, badfn=badfn,
icasefs=icasefs)
+ def flushall(self):
+ pass # For overlayworkingfilectx compatibility.
+
def _filtersuspectsymlink(self, files):
if not files or self._repo.dirstate._checklink:
return files
@@ -1703,16 +1754,13 @@
# Even if the wlock couldn't be grabbed, clear out the list.
self._repo.clearpostdsstatus()
- def _dirstatestatus(self, match=None, ignored=False, clean=False,
- unknown=False):
+ def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
'''Gets the status from the dirstate -- internal use only.'''
- listignored, listclean, listunknown = ignored, clean, unknown
- match = match or matchmod.always(self._repo.root, self._repo.getcwd())
subrepos = []
if '.hgsub' in self:
subrepos = sorted(self.substate)
- cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
- listclean, listunknown)
+ cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
+ clean=clean, unknown=unknown)
# check for any possibly clean files
fixup = []
@@ -1721,7 +1769,7 @@
s.modified.extend(modified2)
s.deleted.extend(deleted2)
- if fixup and listclean:
+ if fixup and clean:
s.clean.extend(fixup)
self._poststatusfixup(s, fixup)
@@ -1800,8 +1848,6 @@
If we aren't comparing against the working directory's parent, then we
just use the default match object sent to us.
"""
- superself = super(workingctx, self)
- match = superself._matchstatus(other, match)
if other != self._repo['.']:
def bad(f, msg):
# 'f' may be a directory pattern from 'match.files()',
@@ -1920,9 +1966,202 @@
self._repo.wwrite(self._path, data, flags,
backgroundclose=backgroundclose)
+ def clearunknown(self):
+ """Removes conflicting items in the working directory so that
+ ``write()`` can be called successfully.
+ """
+ wvfs = self._repo.wvfs
+ if wvfs.isdir(self._path) and not wvfs.islink(self._path):
+ wvfs.removedirs(self._path)
+
def setflags(self, l, x):
self._repo.wvfs.setflags(self._path, l, x)
+class overlayworkingctx(workingctx):
+ """Wraps another mutable context with a write-back cache that can be flushed
+ at a later time.
+
+ self._cache[path] maps to a dict with keys: {
+ 'exists': bool?
+ 'date': date?
+ 'data': str?
+ 'flags': str?
+ }
+ If `exists` is True, `flags` must be non-None and `date` is non-None. If it
+ is `False`, the file was deleted.
+ """
+
+ def __init__(self, repo, wrappedctx):
+ super(overlayworkingctx, self).__init__(repo)
+ self._repo = repo
+ self._wrappedctx = wrappedctx
+ self._clean()
+
+ def data(self, path):
+ if self.isdirty(path):
+ if self._cache[path]['exists']:
+ if self._cache[path]['data']:
+ return self._cache[path]['data']
+ else:
+ # Must fallback here, too, because we only set flags.
+ return self._wrappedctx[path].data()
+ else:
+ raise error.ProgrammingError("No such file or directory: %s" %
+ path)
+ else:
+ return self._wrappedctx[path].data()
+
+ def filedate(self, path):
+ if self.isdirty(path):
+ return self._cache[path]['date']
+ else:
+ return self._wrappedctx[path].date()
+
+ def flags(self, path):
+ if self.isdirty(path):
+ if self._cache[path]['exists']:
+ return self._cache[path]['flags']
+ else:
+ raise error.ProgrammingError("No such file or directory: %s" %
+ path)
+ else:
+ return self._wrappedctx[path].flags()
+
+ def write(self, path, data, flags=''):
+ if data is None:
+ raise error.ProgrammingError("data must be non-None")
+ self._markdirty(path, exists=True, data=data, date=util.makedate(),
+ flags=flags)
+
+ def setflags(self, path, l, x):
+ self._markdirty(path, exists=True, date=util.makedate(),
+ flags=(l and 'l' or '') + (x and 'x' or ''))
+
+ def remove(self, path):
+ self._markdirty(path, exists=False)
+
+ def exists(self, path):
+ """exists behaves like `lexists`, but needs to follow symlinks and
+ return False if they are broken.
+ """
+ if self.isdirty(path):
+ # If this path exists and is a symlink, "follow" it by calling
+ # exists on the destination path.
+ if (self._cache[path]['exists'] and
+ 'l' in self._cache[path]['flags']):
+ return self.exists(self._cache[path]['data'].strip())
+ else:
+ return self._cache[path]['exists']
+ return self._wrappedctx[path].exists()
+
+ def lexists(self, path):
+ """lexists returns True if the path exists"""
+ if self.isdirty(path):
+ return self._cache[path]['exists']
+ return self._wrappedctx[path].lexists()
+
+ def size(self, path):
+ if self.isdirty(path):
+ if self._cache[path]['exists']:
+ return len(self._cache[path]['data'])
+ else:
+ raise error.ProgrammingError("No such file or directory: %s" %
+ path)
+ return self._wrappedctx[path].size()
+
+ def flushall(self):
+ for path in self._writeorder:
+ entry = self._cache[path]
+ if entry['exists']:
+ self._wrappedctx[path].clearunknown()
+ if entry['data'] is not None:
+ if entry['flags'] is None:
+ raise error.ProgrammingError('data set but not flags')
+ self._wrappedctx[path].write(
+ entry['data'],
+ entry['flags'])
+ else:
+ self._wrappedctx[path].setflags(
+ 'l' in entry['flags'],
+ 'x' in entry['flags'])
+ else:
+ self._wrappedctx[path].remove(path)
+ self._clean()
+
+ def isdirty(self, path):
+ return path in self._cache
+
+ def _clean(self):
+ self._cache = {}
+ self._writeorder = []
+
+ def _markdirty(self, path, exists, data=None, date=None, flags=''):
+ if path not in self._cache:
+ self._writeorder.append(path)
+
+ self._cache[path] = {
+ 'exists': exists,
+ 'data': data,
+ 'date': date,
+ 'flags': flags,
+ }
+
+ def filectx(self, path, filelog=None):
+ return overlayworkingfilectx(self._repo, path, parent=self,
+ filelog=filelog)
+
+class overlayworkingfilectx(workingfilectx):
+ """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
+ cache, which can be flushed through later by calling ``flush()``."""
+
+ def __init__(self, repo, path, filelog=None, parent=None):
+ super(overlayworkingfilectx, self).__init__(repo, path, filelog,
+ parent)
+ self._repo = repo
+ self._parent = parent
+ self._path = path
+
+ def ctx(self):
+ return self._parent
+
+ def data(self):
+ return self._parent.data(self._path)
+
+ def date(self):
+ return self._parent.filedate(self._path)
+
+ def exists(self):
+ return self.lexists()
+
+ def lexists(self):
+ return self._parent.exists(self._path)
+
+ def renamed(self):
+ # Copies are currently tracked in the dirstate as before. Straight copy
+ # from workingfilectx.
+ rp = self._repo.dirstate.copied(self._path)
+ if not rp:
+ return None
+ return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
+
+ def size(self):
+ return self._parent.size(self._path)
+
+ def audit(self):
+ pass
+
+ def flags(self):
+ return self._parent.flags(self._path)
+
+ def setflags(self, islink, isexec):
+ return self._parent.setflags(self._path, islink, isexec)
+
+ def write(self, data, flags, backgroundclose=False):
+ return self._parent.write(self._path, data, flags)
+
+ def remove(self, ignoremissing=False):
+ return self._parent.remove(self._path)
+
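overlayworkingctx above buffers writes per path in self._cache and replays them in write order when flushall() is called. A toy standalone version of such a write-back overlay, using a plain dict as the backing store::

  class overlaycache(object):
      """Reads fall through to the backing store unless the path is dirty;
      writes are buffered and applied in order by flushall()."""
      def __init__(self, backing):
          self._backing = backing      # plain dict: path -> data
          self._cache = {}             # path -> {'exists': bool, 'data': ...}
          self._writeorder = []

      def _markdirty(self, path, exists, data=None):
          if path not in self._cache:
              self._writeorder.append(path)
          self._cache[path] = {'exists': exists, 'data': data}

      def write(self, path, data):
          self._markdirty(path, exists=True, data=data)

      def remove(self, path):
          self._markdirty(path, exists=False)

      def read(self, path):
          if path in self._cache:
              if not self._cache[path]['exists']:
                  raise KeyError(path)
              return self._cache[path]['data']
          return self._backing[path]

      def flushall(self):
          for path in self._writeorder:
              entry = self._cache[path]
              if entry['exists']:
                  self._backing[path] = entry['data']
              else:
                  self._backing.pop(path, None)
          self._cache = {}
          self._writeorder = []

  store = {'a.txt': 'old'}
  overlay = overlaycache(store)
  overlay.write('a.txt', 'new')
  assert store['a.txt'] == 'old' and overlay.read('a.txt') == 'new'
  overlay.flushall()
  assert store['a.txt'] == 'new'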
class workingcommitctx(workingctx):
"""A workingcommitctx object makes access to data related to
the revision being committed convenient.
@@ -1935,14 +2174,12 @@
super(workingctx, self).__init__(repo, text, user, date, extra,
changes)
- def _dirstatestatus(self, match=None, ignored=False, clean=False,
- unknown=False):
+ def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
"""Return matched files only in ``self._status``
Uncommitted files appear "clean" via this context, even if
they aren't actually so in the working directory.
"""
- match = match or matchmod.always(self._repo.root, self._repo.getcwd())
if clean:
clean = [f for f in self._manifest if f not in self._changedset]
else:
@@ -2257,15 +2494,23 @@
def __new__(cls, repo, originalctx, *args, **kwargs):
return super(metadataonlyctx, cls).__new__(cls, repo)
- def __init__(self, repo, originalctx, parents, text, user=None, date=None,
- extra=None, editor=False):
+ def __init__(self, repo, originalctx, parents=None, text=None, user=None,
+ date=None, extra=None, editor=False):
+ if text is None:
+ text = originalctx.description()
super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
self._rev = None
self._node = None
self._originalctx = originalctx
self._manifestnode = originalctx.manifestnode()
- parents = [(p or nullid) for p in parents]
- p1, p2 = self._parents = [changectx(self._repo, p) for p in parents]
+ if parents is None:
+ parents = originalctx.parents()
+ else:
+ parents = [repo[p] for p in parents if p is not None]
+ parents = parents[:]
+ while len(parents) < 2:
+ parents.append(repo[nullid])
+ p1, p2 = self._parents = parents
# sanity check to ensure that the reused manifest parents are
# manifests of our commit parents
@@ -2322,9 +2567,40 @@
for f in self._files:
if not managing(f):
added.append(f)
- elif self[f]:
+ elif f in self:
modified.append(f)
else:
removed.append(f)
return scmutil.status(modified, added, removed, [], [], [], [])
+
+class arbitraryfilectx(object):
+ """Allows you to use filectx-like functions on a file in an arbitrary
+ location on disk, possibly not in the working directory.
+ """
+ def __init__(self, path):
+ self._path = path
+
+ def cmp(self, otherfilectx):
+ return self.data() != otherfilectx.data()
+
+ def path(self):
+ return self._path
+
+ def flags(self):
+ return ''
+
+ def data(self):
+ return util.readfile(self._path)
+
+ def decodeddata(self):
+ with open(self._path, "rb") as f:
+ return f.read()
+
+ def remove(self):
+ util.unlink(self._path)
+
+ def write(self, data, flags):
+ assert not flags
+ with open(self._path, "w") as f:
+ f.write(data)
--- a/mercurial/copies.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/copies.py Sat Sep 30 07:52:48 2017 -0700
@@ -7,9 +7,12 @@
from __future__ import absolute_import
+import collections
import heapq
+import os
from . import (
+ match as matchmod,
node,
pathutil,
scmutil,
@@ -137,7 +140,7 @@
def _dirstatecopies(d):
ds = d._repo.dirstate
c = ds.copies().copy()
- for k in c.keys():
+ for k in list(c):
if ds[k] not in 'anm':
del c[k]
return c
@@ -182,8 +185,9 @@
# optimization, since the ctx.files() for a merge commit is not correct for
# this comparison.
forwardmissingmatch = match
- if not match and b.p1() == a and b.p2().node() == node.nullid:
- forwardmissingmatch = scmutil.matchfiles(a._repo, b.files())
+ if b.p1() == a and b.p2().node() == node.nullid:
+ filesmatcher = scmutil.matchfiles(a._repo, b.files())
+ forwardmissingmatch = matchmod.intersectmatchers(match, filesmatcher)
missing = _computeforwardmissing(a, b, match=forwardmissingmatch)
ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True)
@@ -201,7 +205,7 @@
return cm
def _backwardrenames(a, b):
- if a._repo.ui.configbool('experimental', 'disablecopytrace'):
+ if a._repo.ui.config('experimental', 'copytrace') == 'off':
return {}
# Even though we're not taking copies into account, 1:n rename situations
@@ -304,8 +308,27 @@
def mergecopies(repo, c1, c2, base):
"""
- Find moves and copies between context c1 and c2 that are relevant
- for merging. 'base' will be used as the merge base.
+ This function dispatches to different copytracing algorithms, based on
+ config, to find moves and copies between contexts c1 and c2 that are
+ relevant for merging. 'base' will be used as the merge base.
+
+ Copytracing is used by commands like rebase, merge, and unshelve to merge
+ files that were moved/copied in one merge parent and modified in another.
+ For example:
+
+ o ---> 4 another commit
+ |
+ | o ---> 3 commit that modifies a.txt
+ | /
+ o / ---> 2 commit that moves a.txt to b.txt
+ |/
+ o ---> 1 merge base
+
+ If we try to rebase revision 3 onto revision 4, since there is no a.txt in
+ revision 4 and the user has copytrace disabled, we print the following
+ message:
+
+ ```other changed <file> which local deleted```
Returns five dicts: "copy", "movewithdir", "diverge", "renamedelete" and
"dirmove".
@@ -336,12 +359,45 @@
if c2.node() is None and c1.node() == repo.dirstate.p1():
return repo.dirstate.copies(), {}, {}, {}, {}
+ copytracing = repo.ui.config('experimental', 'copytrace')
+
# Copy trace disabling is explicitly below the node == p1 logic above
# because the logic above is required for a simple copy to be kept across a
# rebase.
- if repo.ui.configbool('experimental', 'disablecopytrace'):
+ if copytracing == 'off':
return {}, {}, {}, {}, {}
+ elif copytracing == 'heuristics':
+ # Do full copytracing if only non-public revisions are involved as
+ # that will be fast enough and will also cover the copies which could
+ # be missed by heuristics
+ if _isfullcopytraceable(repo, c1, base):
+ return _fullcopytracing(repo, c1, c2, base)
+ return _heuristicscopytracing(repo, c1, c2, base)
+ else:
+ return _fullcopytracing(repo, c1, c2, base)
+def _isfullcopytraceable(repo, c1, base):
+ """ Checks that if base, source and destination are all no-public branches,
+ if yes let's use the full copytrace algorithm for increased capabilities
+ since it will be fast enough.
+ """
+ if c1.rev() is None:
+ c1 = c1.p1()
+ if c1.mutable() and base.mutable():
+ sourcecommitlimit = repo.ui.configint('experimental',
+ 'copytrace.sourcecommitlimit')
+ commits = len(repo.revs('%d::%d', base.rev(), c1.rev()))
+ return commits < sourcecommitlimit
+ return False
+
+def _fullcopytracing(repo, c1, c2, base):
+ """ The full copytracing algorithm which finds all the new files that were
+ added from merge base up to the top commit and for each file it checks if
+ this file was copied from another file.
+
+ This is pretty slow when a lot of changesets are involved but will track all
+ the copies.
+ """
# In certain scenarios (e.g. graft, update or rebase), base can be
# overridden We still need to know a real common ancestor in this case We
# can't just compute _c1.ancestor(_c2) and compare it to ca, because there
@@ -357,7 +413,7 @@
# if we have a dirty endpoint, we need to trigger graft logic, and also
# keep track of which endpoint is dirty
dirtyc1 = not (base == _c1 or base.descendant(_c1))
- dirtyc2 = not (base== _c2 or base.descendant(_c2))
+ dirtyc2 = not (base == _c2 or base.descendant(_c2))
graft = dirtyc1 or dirtyc2
tca = base
if graft:
@@ -434,7 +490,7 @@
renamedelete = {}
renamedeleteset = set()
divergeset = set()
- for of, fl in diverge.items():
+ for of, fl in list(diverge.items()):
if len(fl) == 1 or of in c1 or of in c2:
del diverge[of] # not actually divergent, or not a rename
if of not in c1 and of not in c2:
@@ -566,6 +622,94 @@
return copy, movewithdir, diverge, renamedelete, dirmove
+def _heuristicscopytracing(repo, c1, c2, base):
+ """ Fast copytracing using filename heuristics
+
+ Assumes that moves or renames are of the following two types:
+
+ 1) Inside a directory only (same directory name but different filenames)
+ 2) Move from one directory to another
+ (same filenames but different directory names)
+
+ Works only when there are no merge commits in the "source branch".
+ The source branch is the set of commits from base up to c2, not
+ including base.
+
+ If a merge is involved, it falls back to _fullcopytracing().
+
+ Can be used by setting the following config:
+
+ [experimental]
+ copytrace = heuristics
+ """
+
+ if c1.rev() is None:
+ c1 = c1.p1()
+ if c2.rev() is None:
+ c2 = c2.p1()
+
+ copies = {}
+
+ changedfiles = set()
+ m1 = c1.manifest()
+ if not repo.revs('%d::%d', base.rev(), c2.rev()):
+ # If base is not in c2 branch, we switch to fullcopytracing
+ repo.ui.debug("switching to full copytracing as base is not "
+ "an ancestor of c2\n")
+ return _fullcopytracing(repo, c1, c2, base)
+
+ ctx = c2
+ while ctx != base:
+ if len(ctx.parents()) == 2:
+ # To keep things simple let's not handle merges
+ repo.ui.debug("switching to full copytracing because of merges\n")
+ return _fullcopytracing(repo, c1, c2, base)
+ changedfiles.update(ctx.files())
+ ctx = ctx.p1()
+
+ cp = _forwardcopies(base, c2)
+ for dst, src in cp.iteritems():
+ if src in m1:
+ copies[dst] = src
+
+ # file is missing if it isn't present in the destination, but is present in
+ # the base and present in the source.
+ # Presence in the base is important to exclude added files, presence in the
+ # source is important to exclude removed files.
+ missingfiles = filter(lambda f: f not in m1 and f in base and f in c2,
+ changedfiles)
+
+ if missingfiles:
+ basenametofilename = collections.defaultdict(list)
+ dirnametofilename = collections.defaultdict(list)
+
+ for f in m1.filesnotin(base.manifest()):
+ basename = os.path.basename(f)
+ dirname = os.path.dirname(f)
+ basenametofilename[basename].append(f)
+ dirnametofilename[dirname].append(f)
+
+ # in case of a rebase/graft, base may not be a common ancestor
+ anc = c1.ancestor(c2)
+
+ for f in missingfiles:
+ basename = os.path.basename(f)
+ dirname = os.path.dirname(f)
+ samebasename = basenametofilename[basename]
+ samedirname = dirnametofilename[dirname]
+ movecandidates = samebasename + samedirname
+ # f is guaranteed to be present in c2, that's why
+ # c2.filectx(f) won't fail
+ f2 = c2.filectx(f)
+ for candidate in movecandidates:
+ f1 = c1.filectx(candidate)
+ if _related(f1, f2, anc.rev()):
+ # if there are a few related copies then we'll merge
+ # changes into all of them. This matches the behaviour
+ # of upstream copytracing
+ copies[candidate] = f
+
+ return copies, {}, {}, {}, {}
+
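The heuristic above matches a file that went missing on one side against files added on the other side that share either a basename (the file moved between directories) or a directory (the file was renamed in place). A small standalone sketch of that candidate search::

  import collections
  import os

  def movecandidates(newfiles, missingfile):
      """Files in newfiles sharing a basename or a directory with
      missingfile -- the same grouping the heuristic above relies on."""
      bybasename = collections.defaultdict(list)
      bydirname = collections.defaultdict(list)
      for f in newfiles:
          bybasename[os.path.basename(f)].append(f)
          bydirname[os.path.dirname(f)].append(f)
      return (bybasename[os.path.basename(missingfile)]
              + bydirname[os.path.dirname(missingfile)])

  # 'src/a.txt' is missing; 'lib/a.txt' (same basename) and 'src/b.txt'
  # (same directory) are both plausible rename targets.
  print(movecandidates(['src/b.txt', 'lib/a.txt'], 'src/a.txt'))
  # ['lib/a.txt', 'src/b.txt']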
def _related(f1, f2, limit):
"""return True if f1 and f2 filectx have a common ancestor
@@ -613,8 +757,8 @@
limit = the rev number to not search beyond
data = dictionary of dictionary to store copy data. (see mergecopies)
- note: limit is only an optimization, and there is no guarantee that
- irrelevant revisions will not be limited
+ note: limit is only an optimization, and provides no guarantee that
+ irrelevant revisions will not be visited
there is no easy way to make this algorithm stop in a guaranteed way
once it "goes behind a certain revision".
"""
@@ -704,8 +848,8 @@
'''
exclude = {}
if (skiprev is not None and
- not repo.ui.configbool('experimental', 'disablecopytrace')):
- # disablecopytrace skips this line, but not the entire function because
+ repo.ui.config('experimental', 'copytrace') != 'off'):
+ # copytrace='off' skips this line, but not the entire function because
# the line below is O(size of the repo) during a rebase, while the rest
# of the function is much faster (and is required for carrying copy
# metadata across the rebase anyway).
--- a/mercurial/crecord.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/crecord.py Sat Sep 30 07:52:48 2017 -0700
@@ -1010,6 +1010,13 @@
def _getstatuslinesegments(self):
"""-> [str]. return segments"""
selected = self.currentselecteditem.applied
+ spaceselect = _('space: select')
+ spacedeselect = _('space: deselect')
+ # Format the selected label into a place as long as the longer of the
+ # two possible labels. This may vary by language.
+ spacelen = max(len(spaceselect), len(spacedeselect))
+ selectedlabel = '%-*s' % (spacelen,
+ spacedeselect if selected else spaceselect)
segments = [
_headermessages[self.operation],
'-',
@@ -1017,7 +1024,7 @@
_('c: confirm'),
_('q: abort'),
_('arrow keys: move/expand/collapse'),
- _('space: deselect') if selected else _('space: select'),
+ selectedlabel,
_('?: help'),
]
return segments
@@ -1433,6 +1440,17 @@
except curses.error:
pass
+ def commitMessageWindow(self):
+ "Create a temporary commit message editing window on the screen."
+
+ curses.raw()
+ curses.def_prog_mode()
+ curses.endwin()
+ self.commenttext = self.ui.edit(self.commenttext, self.ui.username())
+ curses.cbreak()
+ self.stdscr.refresh()
+ self.stdscr.keypad(1) # allow arrow-keys to continue to function
+
def confirmationwindow(self, windowtext):
"display an informational window, then wait for and return a keypress."
@@ -1545,8 +1563,7 @@
# start the editor and wait for it to complete
try:
- patch = self.ui.edit(patch.getvalue(), "",
- extra={"suffix": ".diff"})
+ patch = self.ui.edit(patch.getvalue(), "", action="diff")
except error.Abort as exc:
self.errorstr = str(exc)
return None
@@ -1654,6 +1671,8 @@
self.togglefolded()
elif keypressed in ["F"]:
self.togglefolded(foldparent=True)
+ elif keypressed in ["m"]:
+ self.commitMessageWindow()
elif keypressed in ["?"]:
self.helpwindow()
self.stdscr.clear()
@@ -1729,3 +1748,8 @@
keypressed = "foobar"
if self.handlekeypressed(keypressed):
break
+
+ if self.commenttext != "":
+ whitespaceremoved = re.sub(r"(?m)^\s.*(\n|$)", "", self.commenttext)
+ if whitespaceremoved != "":
+ self.opts['message'] = self.commenttext
--- a/mercurial/dagop.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/dagop.py Sat Sep 30 07:52:48 2017 -0700
@@ -75,27 +75,49 @@
if prev != node.nullrev:
heapq.heappush(pendingheap, (heapsign * prev, pdepth))
-def _genrevancestors(repo, revs, followfirst, startdepth, stopdepth):
+def _genrevancestors(repo, revs, followfirst, startdepth, stopdepth, cutfunc):
if followfirst:
cut = 1
else:
cut = None
cl = repo.changelog
- def pfunc(rev):
+ def plainpfunc(rev):
try:
return cl.parentrevs(rev)[:cut]
except error.WdirUnsupported:
return (pctx.rev() for pctx in repo[rev].parents()[:cut])
+ if cutfunc is None:
+ pfunc = plainpfunc
+ else:
+ pfunc = lambda rev: [r for r in plainpfunc(rev) if not cutfunc(r)]
+ revs = revs.filter(lambda rev: not cutfunc(rev))
return _walkrevtree(pfunc, revs, startdepth, stopdepth, reverse=True)
-def revancestors(repo, revs, followfirst, startdepth=None, stopdepth=None):
+def revancestors(repo, revs, followfirst=False, startdepth=None,
+ stopdepth=None, cutfunc=None):
"""Like revlog.ancestors(), but supports additional options, includes
the given revs themselves, and returns a smartset
Scan ends at the stopdepth (exclusive) if specified. Revisions found
earlier than the startdepth are omitted.
+
+ If cutfunc is provided, it will be used to cut the traversal of the DAG.
+ When cutfunc(X) returns True, the DAG traversal stops - revision X and
+ X's ancestors in the traversal path will be skipped. This could be an
+ optimization sometimes.
+
+ Note: if Y is an ancestor of X, cutfunc(X) returning True does not
+ necessarily mean Y will also be cut. Usually cutfunc(Y) also wants to
+ return True in this case. For example,
+
+ D # revancestors(repo, D, cutfunc=lambda rev: rev == B)
+ |\ # will include "A", because the path D -> C -> A was not cut.
+ B C # If "B" gets cut, "A" might want to be cut too.
+ |/
+ A
"""
- gen = _genrevancestors(repo, revs, followfirst, startdepth, stopdepth)
+ gen = _genrevancestors(repo, revs, followfirst, startdepth, stopdepth,
+ cutfunc)
return generatorset(gen, iterasc=False)
def _genrevdescendants(repo, revs, followfirst):
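The cutfunc parameter added to revancestors() above prunes the DAG walk: a revision for which cutfunc returns True is skipped, along with any ancestors reachable only through it. A toy walk over a parents mapping, mirroring the docstring's example (A=0, B=1, C=2, D=3)::

  def ancestorswithcut(parents, revs, cutfunc=lambda rev: False):
      """Yield revs and their ancestors, skipping revisions that are cut."""
      seen = set()
      stack = [r for r in revs if not cutfunc(r)]
      while stack:
          rev = stack.pop()
          if rev in seen:
              continue
          seen.add(rev)
          yield rev
          for p in parents.get(rev, []):
              if not cutfunc(p):
                  stack.append(p)

  parents = {3: [1, 2], 2: [0], 1: [0], 0: []}
  print(sorted(ancestorswithcut(parents, [3], cutfunc=lambda r: r == 1)))
  # [0, 2, 3] -- "B" (1) is cut, but "A" (0) is still reached via "C" (2)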
--- a/mercurial/dagparser.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/dagparser.py Sat Sep 30 07:52:48 2017 -0700
@@ -11,7 +11,11 @@
import string
from .i18n import _
-from . import error
+from . import (
+ error,
+ pycompat,
+ util,
+)
def parsedag(desc):
'''parses a DAG from a concise textual description; generates events
@@ -54,7 +58,7 @@
Example of a complex graph (output not shown for brevity):
- >>> len(list(parsedag("""
+ >>> len(list(parsedag(b"""
...
... +3 # 3 nodes in linear run
... :forkhere # a label for the last of the 3 nodes from above
@@ -74,96 +78,96 @@
Empty list:
- >>> list(parsedag(""))
+ >>> list(parsedag(b""))
[]
A simple linear run:
- >>> list(parsedag("+3"))
+ >>> list(parsedag(b"+3"))
[('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [1]))]
Some non-standard ways to define such runs:
- >>> list(parsedag("+1+2"))
+ >>> list(parsedag(b"+1+2"))
[('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [1]))]
- >>> list(parsedag("+1*1*"))
+ >>> list(parsedag(b"+1*1*"))
[('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [1]))]
- >>> list(parsedag("*"))
+ >>> list(parsedag(b"*"))
[('n', (0, [-1]))]
- >>> list(parsedag("..."))
+ >>> list(parsedag(b"..."))
[('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [1]))]
A fork and a join, using numeric back references:
- >>> list(parsedag("+2*2*/2"))
+ >>> list(parsedag(b"+2*2*/2"))
[('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [0])), ('n', (3, [2, 1]))]
- >>> list(parsedag("+2<2+1/2"))
+ >>> list(parsedag(b"+2<2+1/2"))
[('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [0])), ('n', (3, [2, 1]))]
Placing a label:
- >>> list(parsedag("+1 :mylabel +1"))
+ >>> list(parsedag(b"+1 :mylabel +1"))
[('n', (0, [-1])), ('l', (0, 'mylabel')), ('n', (1, [0]))]
An empty label (silly, really):
- >>> list(parsedag("+1:+1"))
+ >>> list(parsedag(b"+1:+1"))
[('n', (0, [-1])), ('l', (0, '')), ('n', (1, [0]))]
Fork and join, but with labels instead of numeric back references:
- >>> list(parsedag("+1:f +1:p2 *f */p2"))
+ >>> list(parsedag(b"+1:f +1:p2 *f */p2"))
[('n', (0, [-1])), ('l', (0, 'f')), ('n', (1, [0])), ('l', (1, 'p2')),
('n', (2, [0])), ('n', (3, [2, 1]))]
- >>> list(parsedag("+1:f +1:p2 <f +1 /p2"))
+ >>> list(parsedag(b"+1:f +1:p2 <f +1 /p2"))
[('n', (0, [-1])), ('l', (0, 'f')), ('n', (1, [0])), ('l', (1, 'p2')),
('n', (2, [0])), ('n', (3, [2, 1]))]
Restarting from the root:
- >>> list(parsedag("+1 $ +1"))
+ >>> list(parsedag(b"+1 $ +1"))
[('n', (0, [-1])), ('n', (1, [-1]))]
Annotations, which are meant to introduce sticky state for subsequent nodes:
- >>> list(parsedag("+1 @ann +1"))
+ >>> list(parsedag(b"+1 @ann +1"))
[('n', (0, [-1])), ('a', 'ann'), ('n', (1, [0]))]
- >>> list(parsedag('+1 @"my annotation" +1'))
+ >>> list(parsedag(b'+1 @"my annotation" +1'))
[('n', (0, [-1])), ('a', 'my annotation'), ('n', (1, [0]))]
Commands, which are meant to operate on the most recently created node:
- >>> list(parsedag("+1 !cmd +1"))
+ >>> list(parsedag(b"+1 !cmd +1"))
[('n', (0, [-1])), ('c', 'cmd'), ('n', (1, [0]))]
- >>> list(parsedag('+1 !"my command" +1'))
+ >>> list(parsedag(b'+1 !"my command" +1'))
[('n', (0, [-1])), ('c', 'my command'), ('n', (1, [0]))]
- >>> list(parsedag('+1 !!my command line\\n +1'))
+ >>> list(parsedag(b'+1 !!my command line\\n +1'))
[('n', (0, [-1])), ('C', 'my command line'), ('n', (1, [0]))]
Comments, which extend to the end of the line:
- >>> list(parsedag('+1 # comment\\n+1'))
+ >>> list(parsedag(b'+1 # comment\\n+1'))
[('n', (0, [-1])), ('n', (1, [0]))]
Error:
- >>> try: list(parsedag('+1 bad'))
- ... except Exception, e: print e
+ >>> try: list(parsedag(b'+1 bad'))
+ ... except Exception as e: print(pycompat.sysstr(bytes(e)))
invalid character in dag description: bad...
'''
if not desc:
return
- wordchars = string.ascii_letters + string.digits
+ wordchars = pycompat.bytestr(string.ascii_letters + string.digits)
labels = {}
p1 = -1
@@ -172,12 +176,12 @@
def resolve(ref):
if not ref:
return p1
- elif ref[0] in string.digits:
+ elif ref[0] in pycompat.bytestr(string.digits):
return r - int(ref)
else:
return labels[ref]
- chiter = (c for c in desc)
+ chiter = pycompat.iterbytestr(desc)
def nextch():
return next(chiter, '\0')
@@ -206,7 +210,7 @@
c = nextch()
while c != '\0':
- while c in string.whitespace:
+ while c in pycompat.bytestr(string.whitespace):
c = nextch()
if c == '.':
yield 'n', (r, [p1])
@@ -214,7 +218,7 @@
r += 1
c = nextch()
elif c == '+':
- c, digs = nextrun(nextch(), string.digits)
+ c, digs = nextrun(nextch(), pycompat.bytestr(string.digits))
n = int(digs)
for i in xrange(0, n):
yield 'n', (r, [p1])
@@ -313,7 +317,7 @@
if len(ps) == 1 and ps[0] == -1:
if needroot:
if run:
- yield '+' + str(run)
+ yield '+%d' % run
run = 0
if wrapnonlinear:
yield '\n'
@@ -328,7 +332,7 @@
run += 1
else:
if run:
- yield '+' + str(run)
+ yield '+%d' % run
run = 0
if wrapnonlinear:
yield '\n'
@@ -339,11 +343,11 @@
elif p in labels:
prefs.append(labels[p])
else:
- prefs.append(str(r - p))
+ prefs.append('%d' % (r - p))
yield '*' + '/'.join(prefs)
else:
if run:
- yield '+' + str(run)
+ yield '+%d' % run
run = 0
if kind == 'l':
rid, name = data
@@ -366,10 +370,12 @@
yield '#' + data
yield '\n'
else:
- raise error.Abort(_("invalid event type in dag: %s")
- % str((type, data)))
+ raise error.Abort(_("invalid event type in dag: "
+ "('%s', '%s')")
+ % (util.escapestr(kind),
+ util.escapestr(data)))
if run:
- yield '+' + str(run)
+ yield '+%d' % run
line = ''
for part in gen():
@@ -413,52 +419,54 @@
Linear run:
- >>> dagtext([('n', (0, [-1])), ('n', (1, [0]))])
+ >>> dagtext([(b'n', (0, [-1])), (b'n', (1, [0]))])
'+2'
Two roots:
- >>> dagtext([('n', (0, [-1])), ('n', (1, [-1]))])
+ >>> dagtext([(b'n', (0, [-1])), (b'n', (1, [-1]))])
'+1 $ +1'
Fork and join:
- >>> dagtext([('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [0])),
- ... ('n', (3, [2, 1]))])
+ >>> dagtext([(b'n', (0, [-1])), (b'n', (1, [0])), (b'n', (2, [0])),
+ ... (b'n', (3, [2, 1]))])
'+2 *2 */2'
Fork and join with labels:
- >>> dagtext([('n', (0, [-1])), ('l', (0, 'f')), ('n', (1, [0])),
- ... ('l', (1, 'p2')), ('n', (2, [0])), ('n', (3, [2, 1]))])
+ >>> dagtext([(b'n', (0, [-1])), (b'l', (0, b'f')), (b'n', (1, [0])),
+ ... (b'l', (1, b'p2')), (b'n', (2, [0])), (b'n', (3, [2, 1]))])
'+1 :f +1 :p2 *f */p2'
Annotations:
- >>> dagtext([('n', (0, [-1])), ('a', 'ann'), ('n', (1, [0]))])
+ >>> dagtext([(b'n', (0, [-1])), (b'a', b'ann'), (b'n', (1, [0]))])
'+1 @ann +1'
- >>> dagtext([('n', (0, [-1])),
- ... ('a', 'my annotation'),
- ... ('n', (1, [0]))])
+ >>> dagtext([(b'n', (0, [-1])),
+ ... (b'a', b'my annotation'),
+ ... (b'n', (1, [0]))])
'+1 @"my annotation" +1'
Commands:
- >>> dagtext([('n', (0, [-1])), ('c', 'cmd'), ('n', (1, [0]))])
+ >>> dagtext([(b'n', (0, [-1])), (b'c', b'cmd'), (b'n', (1, [0]))])
'+1 !cmd +1'
- >>> dagtext([('n', (0, [-1])), ('c', 'my command'), ('n', (1, [0]))])
+ >>> dagtext([(b'n', (0, [-1])),
+ ... (b'c', b'my command'),
+ ... (b'n', (1, [0]))])
'+1 !"my command" +1'
- >>> dagtext([('n', (0, [-1])),
- ... ('C', 'my command line'),
- ... ('n', (1, [0]))])
+ >>> dagtext([(b'n', (0, [-1])),
+ ... (b'C', b'my command line'),
+ ... (b'n', (1, [0]))])
'+1 !!my command line\\n+1'
Comments:
- >>> dagtext([('n', (0, [-1])), ('#', ' comment'), ('n', (1, [0]))])
+ >>> dagtext([(b'n', (0, [-1])), (b'#', b' comment'), (b'n', (1, [0]))])
'+1 # comment\\n+1'
>>> dagtext([])
@@ -466,7 +474,7 @@
Combining parsedag and dagtext:
- >>> dagtext(parsedag('+1 :f +1 :p2 *f */p2'))
+ >>> dagtext(parsedag(b'+1 :f +1 :p2 *f */p2'))
'+1 :f +1 :p2 *f */p2'
'''
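
Most of the dagparser.py churn above is about iterating a bytes value character by character on both Python 2 and Python 3, which is what pycompat.iterbytestr is used for. A rough standalone equivalent (the helper name here is invented):

    import sys

    def iterbytechars(s):
        # On Python 3, iterating bytes yields integers, so slice instead.
        if sys.version_info[0] >= 3:
            return (s[i:i + 1] for i in range(len(s)))
        return iter(s)

    assert list(iterbytechars(b'+2.')) == [b'+', b'2', b'.']
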
--- a/mercurial/debugcommands.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/debugcommands.py Sat Sep 30 07:52:48 2017 -0700
@@ -7,6 +7,8 @@
from __future__ import absolute_import
+import codecs
+import collections
import difflib
import errno
import operator
@@ -261,18 +263,11 @@
def showchunks(named):
ui.write("\n%s%s\n" % (indent_string, named))
- chain = None
- for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
- node = chunkdata['node']
- p1 = chunkdata['p1']
- p2 = chunkdata['p2']
- cs = chunkdata['cs']
- deltabase = chunkdata['deltabase']
- delta = chunkdata['delta']
+ for deltadata in gen.deltaiter():
+ node, p1, p2, cs, deltabase, delta, flags = deltadata
ui.write("%s%s %s %s %s %s %s\n" %
(indent_string, hex(node), hex(p1), hex(p2),
hex(cs), hex(deltabase), len(delta)))
- chain = node
chunkdata = gen.changelogheader()
showchunks("changelog")
@@ -285,11 +280,9 @@
if isinstance(gen, bundle2.unbundle20):
raise error.Abort(_('use debugbundle2 for this file'))
chunkdata = gen.changelogheader()
- chain = None
- for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
- node = chunkdata['node']
+ for deltadata in gen.deltaiter():
+ node, p1, p2, cs, deltabase, delta, flags = deltadata
ui.write("%s%s\n" % (indent_string, hex(node)))
- chain = node
def _debugobsmarkers(ui, part, indent=0, **opts):
"""display version and markers contained in 'data'"""
@@ -317,22 +310,28 @@
def _debugphaseheads(ui, data, indent=0):
"""display version and markers contained in 'data'"""
indent_string = ' ' * indent
- headsbyphase = bundle2._readphaseheads(data)
+ headsbyphase = phases.binarydecode(data)
for phase in phases.allphases:
for head in headsbyphase[phase]:
ui.write(indent_string)
ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
+def _quasirepr(thing):
+ if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
+ return '{%s}' % (
+ b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
+ return pycompat.bytestr(repr(thing))
+
def _debugbundle2(ui, gen, all=None, **opts):
"""lists the contents of a bundle2"""
if not isinstance(gen, bundle2.unbundle20):
raise error.Abort(_('not a bundle2 file'))
- ui.write(('Stream params: %s\n' % repr(gen.params)))
+ ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
parttypes = opts.get(r'part_type', [])
for part in gen.iterparts():
if parttypes and part.type not in parttypes:
continue
- ui.write('%s -- %r\n' % (part.type, repr(part.params)))
+ ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
if part.type == 'changegroup':
version = part.params.get('version', '01')
cg = changegroup.getunbundler(version, part, 'UN')
@@ -990,9 +989,9 @@
fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
err = None
try:
- encoding.fromlocal("test")
- except error.Abort as inst:
- err = inst
+ codecs.lookup(pycompat.sysstr(encoding.encoding))
+ except LookupError as inst:
+ err = util.forcebytestr(inst)
problems += 1
fm.condwrite(err, 'encodingerror', _(" %s\n"
" (check that your locale is properly set)\n"), err)
@@ -1048,7 +1047,7 @@
)
dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
except Exception as inst:
- err = inst
+ err = util.forcebytestr(inst)
problems += 1
fm.condwrite(err, 'extensionserror', " %s\n", err)
@@ -1080,7 +1079,7 @@
try:
templater.templater.frommapfile(m)
except Exception as inst:
- err = inst
+ err = util.forcebytestr(inst)
p = None
fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
else:
@@ -1116,7 +1115,7 @@
try:
username = ui.username()
except error.Abort as e:
- err = e
+ err = util.forcebytestr(e)
problems += 1
fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
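
The _quasirepr helper above exists so that bundle part parameters print identically regardless of how the underlying dict type orders its keys. A reduced sketch of the same idea in plain Python:

    def stablerepr(thing):
        # Sort keys so the rendering is deterministic across Python versions.
        if isinstance(thing, dict):
            return '{%s}' % ', '.join(
                '%s: %s' % (k, thing[k]) for k in sorted(thing))
        return repr(thing)

    print(stablerepr({'b': 2, 'a': 1}))  # {a: 1, b: 2}
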
--- a/mercurial/dirstate.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/dirstate.py Sat Sep 30 07:52:48 2017 -0700
@@ -54,20 +54,6 @@
os.close(tmpfd)
vfs.unlink(tmpname)
-def nonnormalentries(dmap):
- '''Compute the nonnormal dirstate entries from the dmap'''
- try:
- return parsers.nonnormalotherparententries(dmap)
- except AttributeError:
- nonnorm = set()
- otherparent = set()
- for fname, e in dmap.iteritems():
- if e[0] != 'n' or e[3] == -1:
- nonnorm.add(fname)
- if e[0] == 'n' and e[2] == -2:
- otherparent.add(fname)
- return nonnorm, otherparent
-
class dirstate(object):
def __init__(self, opener, ui, root, validate, sparsematchfn):
@@ -85,7 +71,6 @@
# UNC path pointing to root share (issue4557)
self._rootdir = pathutil.normasprefix(root)
self._dirty = False
- self._dirtypl = False
self._lastnormaltime = 0
self._ui = ui
self._filecache = {}
@@ -96,9 +81,6 @@
self._origpl = None
self._updatedfiles = set()
- # for consistent view between _pl() and _read() invocations
- self._pendingmode = None
-
@contextlib.contextmanager
def parentchange(self):
'''Context manager for handling dirstate parents.
@@ -151,44 +133,25 @@
return self._map
@propertycache
- def _copymap(self):
- self._read()
- return self._copymap
-
- @propertycache
def _identity(self):
self._read()
return self._identity
@propertycache
def _nonnormalset(self):
- nonnorm, otherparents = nonnormalentries(self._map)
+ nonnorm, otherparents = self._map.nonnormalentries()
self._otherparentset = otherparents
return nonnorm
@propertycache
def _otherparentset(self):
- nonnorm, otherparents = nonnormalentries(self._map)
+ nonnorm, otherparents = self._map.nonnormalentries()
self._nonnormalset = nonnorm
return otherparents
@propertycache
def _filefoldmap(self):
- try:
- makefilefoldmap = parsers.make_file_foldmap
- except AttributeError:
- pass
- else:
- return makefilefoldmap(self._map, util.normcasespec,
- util.normcasefallback)
-
- f = {}
- normcase = util.normcase
- for name, s in self._map.iteritems():
- if s[0] != 'r':
- f[normcase(name)] = name
- f['.'] = '.' # prevents useless util.fspath() invocation
- return f
+ return self._map.filefoldmap()
@propertycache
def _dirfoldmap(self):
@@ -220,25 +183,13 @@
raise
return "default"
- @propertycache
+ @property
def _pl(self):
- try:
- fp = self._opendirstatefile()
- st = fp.read(40)
- fp.close()
- l = len(st)
- if l == 40:
- return st[:20], st[20:40]
- elif l > 0 and l < 40:
- raise error.Abort(_('working directory state appears damaged!'))
- except IOError as err:
- if err.errno != errno.ENOENT:
- raise
- return [nullid, nullid]
+ return self._map.parents()
@propertycache
def _dirs(self):
- return util.dirs(self._map, 'r')
+ return self._map.dirs()
def dirs(self):
return self._dirs
@@ -359,8 +310,7 @@
return key in self._map
def __iter__(self):
- for x in sorted(self._map):
- yield x
+ return iter(sorted(self._map))
def items(self):
return self._map.iteritems()
@@ -392,11 +342,11 @@
raise ValueError("cannot set dirstate parent without "
"calling dirstate.beginparentchange")
- self._dirty = self._dirtypl = True
+ self._dirty = True
oldp2 = self._pl[1]
if self._origpl is None:
self._origpl = self._pl
- self._pl = p1, p2
+ self._map.setparents(p1, p2)
copies = {}
if oldp2 != nullid and p2 == nullid:
candidatefiles = self._nonnormalset.union(self._otherparentset)
@@ -407,13 +357,15 @@
# Discard 'm' markers when moving away from a merge state
if s[0] == 'm':
- if f in self._copymap:
- copies[f] = self._copymap[f]
+ source = self._map.copymap.get(f)
+ if source:
+ copies[f] = source
self.normallookup(f)
# Also fix up otherparent markers
elif s[0] == 'n' and s[2] == -2:
- if f in self._copymap:
- copies[f] = self._copymap[f]
+ source = self._map.copymap.get(f)
+ if source:
+ copies[f] = source
self.add(f)
return copies
@@ -433,23 +385,14 @@
f.discard()
raise
- def _opendirstatefile(self):
- fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
- if self._pendingmode is not None and self._pendingmode != mode:
- fp.close()
- raise error.Abort(_('working directory state may be '
- 'changed parallelly'))
- self._pendingmode = mode
- return fp
+ def _read(self):
+ self._map = dirstatemap(self._ui, self._opener, self._root)
- def _read(self):
- self._map = {}
- self._copymap = {}
# ignore HG_PENDING because identity is used only for writing
self._identity = util.filestat.frompath(
self._opener.join(self._filename))
try:
- fp = self._opendirstatefile()
+ fp = self._map._opendirstatefile()
try:
st = fp.read()
finally:
@@ -472,7 +415,7 @@
# This heuristic is imperfect in many ways, so in a future dirstate
# format update it makes sense to just record the number of entries
# on write.
- self._map = parsers.dict_new_presized(len(st) / 71)
+ self._map._map = parsers.dict_new_presized(len(st) / 71)
# Python's garbage collector triggers a GC each time a certain number
# of container objects (the number being defined by
@@ -487,9 +430,9 @@
#
# (we cannot decorate the function directly since it is in a C module)
parse_dirstate = util.nogc(parsers.parse_dirstate)
- p = parse_dirstate(self._map, self._copymap, st)
- if not self._dirtypl:
- self._pl = p
+ p = parse_dirstate(self._map._map, self._map.copymap, st)
+ if not self._map._dirtyparents:
+ self._map.setparents(*p)
def invalidate(self):
'''Causes the next access to reread the dirstate.
@@ -498,9 +441,9 @@
rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
check whether the dirstate has changed before rereading it.'''
- for a in ("_map", "_copymap", "_identity",
+ for a in ("_map", "_identity",
"_filefoldmap", "_dirfoldmap", "_branch",
- "_pl", "_dirs", "_ignore", "_nonnormalset",
+ "_dirs", "_ignore", "_nonnormalset",
"_otherparentset"):
if a in self.__dict__:
delattr(self, a)
@@ -516,18 +459,17 @@
return
self._dirty = True
if source is not None:
- self._copymap[dest] = source
+ self._map.copymap[dest] = source
self._updatedfiles.add(source)
self._updatedfiles.add(dest)
- elif dest in self._copymap:
- del self._copymap[dest]
+ elif self._map.copymap.pop(dest, None):
self._updatedfiles.add(dest)
def copied(self, file):
- return self._copymap.get(file, None)
+ return self._map.copymap.get(file, None)
def copies(self):
- return self._copymap
+ return self._map.copymap
def _droppath(self, f):
if self[f] not in "?r" and "_dirs" in self.__dict__:
@@ -550,7 +492,8 @@
for d in util.finddirs(f):
if d in self._dirs:
break
- if d in self._map and self[d] != 'r':
+ entry = self._map.get(d)
+ if entry is not None and entry[0] != 'r':
raise error.Abort(
_('file %r in dirstate clashes with %r') % (d, f))
if oldstate in "?r" and "_dirs" in self.__dict__:
@@ -569,8 +512,7 @@
mtime = s.st_mtime
self._addpath(f, 'n', s.st_mode,
s.st_size & _rangemask, mtime & _rangemask)
- if f in self._copymap:
- del self._copymap[f]
+ self._map.copymap.pop(f, None)
if f in self._nonnormalset:
self._nonnormalset.remove(f)
if mtime > self._lastnormaltime:
@@ -581,25 +523,25 @@
def normallookup(self, f):
'''Mark a file normal, but possibly dirty.'''
- if self._pl[1] != nullid and f in self._map:
+ if self._pl[1] != nullid:
# if there is a merge going on and the file was either
# in state 'm' (-1) or coming from other parent (-2) before
# being removed, restore that state.
- entry = self._map[f]
- if entry[0] == 'r' and entry[2] in (-1, -2):
- source = self._copymap.get(f)
- if entry[2] == -1:
- self.merge(f)
- elif entry[2] == -2:
- self.otherparent(f)
- if source:
- self.copy(source, f)
- return
- if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
- return
+ entry = self._map.get(f)
+ if entry is not None:
+ if entry[0] == 'r' and entry[2] in (-1, -2):
+ source = self._map.copymap.get(f)
+ if entry[2] == -1:
+ self.merge(f)
+ elif entry[2] == -2:
+ self.otherparent(f)
+ if source:
+ self.copy(source, f)
+ return
+ if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
+ return
self._addpath(f, 'n', 0, -1, -1)
- if f in self._copymap:
- del self._copymap[f]
+ self._map.copymap.pop(f, None)
if f in self._nonnormalset:
self._nonnormalset.remove(f)
@@ -614,33 +556,31 @@
else:
# add-like
self._addpath(f, 'n', 0, -2, -1)
-
- if f in self._copymap:
- del self._copymap[f]
+ self._map.copymap.pop(f, None)
def add(self, f):
'''Mark a file added.'''
self._addpath(f, 'a', 0, -1, -1)
- if f in self._copymap:
- del self._copymap[f]
+ self._map.copymap.pop(f, None)
def remove(self, f):
'''Mark a file removed.'''
self._dirty = True
self._droppath(f)
size = 0
- if self._pl[1] != nullid and f in self._map:
- # backup the previous state
- entry = self._map[f]
- if entry[0] == 'm': # merge
- size = -1
- elif entry[0] == 'n' and entry[2] == -2: # other parent
- size = -2
- self._otherparentset.add(f)
+ if self._pl[1] != nullid:
+ entry = self._map.get(f)
+ if entry is not None:
+ # backup the previous state
+ if entry[0] == 'm': # merge
+ size = -1
+ elif entry[0] == 'n' and entry[2] == -2: # other parent
+ size = -2
+ self._otherparentset.add(f)
self._map[f] = dirstatetuple('r', 0, size, 0)
self._nonnormalset.add(f)
- if size == 0 and f in self._copymap:
- del self._copymap[f]
+ if size == 0:
+ self._map.copymap.pop(f, None)
def merge(self, f):
'''Mark a file merged.'''
@@ -656,8 +596,7 @@
del self._map[f]
if f in self._nonnormalset:
self._nonnormalset.remove(f)
- if f in self._copymap:
- del self._copymap[f]
+ self._map.copymap.pop(f, None)
def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
if exists is None:
@@ -734,13 +673,12 @@
return path
def clear(self):
- self._map = {}
+ self._map = dirstatemap(self._ui, self._opener, self._root)
self._nonnormalset = set()
self._otherparentset = set()
if "_dirs" in self.__dict__:
delattr(self, "_dirs")
- self._copymap = {}
- self._pl = [nullid, nullid]
+ self._map.setparents(nullid, nullid)
self._lastnormaltime = 0
self._updatedfiles.clear()
self._dirty = True
@@ -755,7 +693,7 @@
if self._origpl is None:
self._origpl = self._pl
- self._pl = (parent, nullid)
+ self._map.setparents(parent, nullid)
for f in changedfiles:
if f in allfiles:
self.normallookup(f)
@@ -843,11 +781,12 @@
now = end # trust our estimate that the end is near now
break
- st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
- self._nonnormalset, self._otherparentset = nonnormalentries(self._map)
+ st.write(parsers.pack_dirstate(self._map._map, self._map.copymap,
+ self._pl, now))
+ self._nonnormalset, self._otherparentset = self._map.nonnormalentries()
st.close()
self._lastnormaltime = 0
- self._dirty = self._dirtypl = False
+ self._dirty = self._map._dirtyparents = False
def _dirignore(self, f):
if f == '.':
@@ -982,13 +921,13 @@
results[nf] = None
else: # does it match a missing directory?
if alldirs is None:
- alldirs = util.dirs(dmap)
+ alldirs = util.dirs(dmap._map)
if nf in alldirs:
if matchedir:
matchedir(nf)
notfoundadd(nf)
else:
- badfn(ff, inst.strerror)
+ badfn(ff, encoding.strtolocal(inst.strerror))
# Case insensitive filesystems cannot rely on lstat() failing to detect
# a case-only rename. Prune the stat object for any file that does not
@@ -1094,7 +1033,8 @@
entries = listdir(join(nd), stat=True, skip=skip)
except OSError as inst:
if inst.errno in (errno.EACCES, errno.ENOENT):
- match.bad(self.pathto(nd), inst.strerror)
+ match.bad(self.pathto(nd),
+ encoding.strtolocal(inst.strerror))
continue
raise
for f, kind, st in entries:
@@ -1216,7 +1156,7 @@
mexact = match.exact
dirignore = self._dirignore
checkexec = self._checkexec
- copymap = self._copymap
+ copymap = self._map.copymap
lastnormaltime = self._lastnormaltime
# We need to do full walks when either
@@ -1341,3 +1281,118 @@
def clearbackup(self, tr, backupname):
'''Clear backup file'''
self._opener.unlink(backupname)
+
+class dirstatemap(object):
+ def __init__(self, ui, opener, root):
+ self._ui = ui
+ self._opener = opener
+ self._root = root
+ self._filename = 'dirstate'
+
+ self._map = {}
+ self.copymap = {}
+ self._parents = None
+ self._dirtyparents = False
+
+ # for consistent view between _pl() and _read() invocations
+ self._pendingmode = None
+
+ def iteritems(self):
+ return self._map.iteritems()
+
+ def __iter__(self):
+ return iter(self._map)
+
+ def get(self, key, default=None):
+ return self._map.get(key, default)
+
+ def __contains__(self, key):
+ return key in self._map
+
+ def __setitem__(self, key, value):
+ self._map[key] = value
+
+ def __getitem__(self, key):
+ return self._map[key]
+
+ def __delitem__(self, key):
+ del self._map[key]
+
+ def keys(self):
+ return self._map.keys()
+
+ def nonnormalentries(self):
+ '''Compute the nonnormal dirstate entries from the dmap'''
+ try:
+ return parsers.nonnormalotherparententries(self._map)
+ except AttributeError:
+ nonnorm = set()
+ otherparent = set()
+ for fname, e in self._map.iteritems():
+ if e[0] != 'n' or e[3] == -1:
+ nonnorm.add(fname)
+ if e[0] == 'n' and e[2] == -2:
+ otherparent.add(fname)
+ return nonnorm, otherparent
+
+ def filefoldmap(self):
+ """Returns a dictionary mapping normalized case paths to their
+ non-normalized versions.
+ """
+ try:
+ makefilefoldmap = parsers.make_file_foldmap
+ except AttributeError:
+ pass
+ else:
+ return makefilefoldmap(self._map, util.normcasespec,
+ util.normcasefallback)
+
+ f = {}
+ normcase = util.normcase
+ for name, s in self._map.iteritems():
+ if s[0] != 'r':
+ f[normcase(name)] = name
+ f['.'] = '.' # prevents useless util.fspath() invocation
+ return f
+
+ def dirs(self):
+ """Returns a set-like object containing all the directories in the
+ current dirstate.
+ """
+ return util.dirs(self._map, 'r')
+
+ def _opendirstatefile(self):
+ fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
+ if self._pendingmode is not None and self._pendingmode != mode:
+ fp.close()
+ raise error.Abort(_('working directory state may be '
+ 'changed parallelly'))
+ self._pendingmode = mode
+ return fp
+
+ def parents(self):
+ if not self._parents:
+ try:
+ fp = self._opendirstatefile()
+ st = fp.read(40)
+ fp.close()
+ except IOError as err:
+ if err.errno != errno.ENOENT:
+ raise
+ # File doesn't exist, so the current state is empty
+ st = ''
+
+ l = len(st)
+ if l == 40:
+ self._parents = st[:20], st[20:40]
+ elif l == 0:
+ self._parents = [nullid, nullid]
+ else:
+ raise error.Abort(_('working directory state appears '
+ 'damaged!'))
+
+ return self._parents
+
+ def setparents(self, p1, p2):
+ self._parents = (p1, p2)
+ self._dirtyparents = True
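
The parents() method added to dirstatemap above reads the two parent nodes lazily from the first 40 bytes of the dirstate file and caches them until setparents() overrides the value. A self-contained sketch of that logic (plain open() stands in for the vfs and pending-file handling used above):

    NULLID = b'\0' * 20

    class parentcache(object):
        def __init__(self, path):
            self._path = path
            self._parents = None

        def parents(self):
            if self._parents is None:
                try:
                    with open(self._path, 'rb') as fp:
                        st = fp.read(40)
                except IOError:
                    st = b''  # missing file means an empty dirstate
                if len(st) == 40:
                    self._parents = (st[:20], st[20:40])
                elif len(st) == 0:
                    self._parents = (NULLID, NULLID)
                else:
                    raise ValueError('dirstate appears damaged')
            return self._parents

        def setparents(self, p1, p2):
            self._parents = (p1, p2)
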
--- a/mercurial/dirstateguard.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/dirstateguard.py Sat Sep 30 07:52:48 2017 -0700
@@ -11,9 +11,10 @@
from . import (
error,
+ util,
)
-class dirstateguard(object):
+class dirstateguard(util.transactional):
'''Restore dirstate at unexpected failure.
At the construction, this class does:
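
dirstateguard now derives from util.transactional. That base class is not shown in this part of the series; one plausible reading, sketched here as an assumption, is a small mixin where subclasses provide close() and release() and inherit with-statement support:

    class transactional(object):
        def close(self):
            """commit the transaction"""
            raise NotImplementedError

        def release(self):
            """release resources, rolling back if close() was not called"""
            raise NotImplementedError

        def __enter__(self):
            return self

        def __exit__(self, exctype, excvalue, traceback):
            try:
                if exctype is None:
                    self.close()
            finally:
                self.release()
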
--- a/mercurial/dispatch.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/dispatch.py Sat Sep 30 07:52:48 2017 -0700
@@ -88,7 +88,8 @@
status = -1
if util.safehasattr(req.ui, 'ferr'):
if err is not None and err.errno != errno.EPIPE:
- req.ui.ferr.write('abort: %s\n' % err.strerror)
+ req.ui.ferr.write('abort: %s\n' %
+ encoding.strtolocal(err.strerror))
req.ui.ferr.flush()
sys.exit(status & 255)
@@ -356,7 +357,10 @@
return -1
def aliasargs(fn, givenargs):
- args = getattr(fn, 'args', [])
+ args = []
+ # only care about alias 'args', ignore 'args' set by extensions.wrapfunction
+ if not util.safehasattr(fn, '_origfunc'):
+ args = getattr(fn, 'args', args)
if args:
cmd = ' '.join(map(util.shellquote, args))
@@ -519,23 +523,52 @@
ui.debug("alias '%s' expands to '%s'\n" % (self.name, args))
raise
+class lazyaliasentry(object):
+ """like a typical command entry (func, opts, help), but is lazy"""
+
+ def __init__(self, name, definition, cmdtable, source):
+ self.name = name
+ self.definition = definition
+ self.cmdtable = cmdtable.copy()
+ self.source = source
+
+ @util.propertycache
+ def _aliasdef(self):
+ return cmdalias(self.name, self.definition, self.cmdtable, self.source)
+
+ def __getitem__(self, n):
+ aliasdef = self._aliasdef
+ if n == 0:
+ return aliasdef
+ elif n == 1:
+ return aliasdef.opts
+ elif n == 2:
+ return aliasdef.help
+ else:
+ raise IndexError
+
+ def __iter__(self):
+ for i in range(3):
+ yield self[i]
+
+ def __len__(self):
+ return 3
+
def addaliases(ui, cmdtable):
# aliases are processed after extensions have been loaded, so they
# may use extension commands. Aliases can also use other alias definitions,
# but only if they have been defined prior to the current definition.
for alias, definition in ui.configitems('alias'):
- source = ui.configsource('alias', alias)
- aliasdef = cmdalias(alias, definition, cmdtable, source)
-
try:
- olddef = cmdtable[aliasdef.cmd][0]
- if olddef.definition == aliasdef.definition:
+ if cmdtable[alias].definition == definition:
continue
except (KeyError, AttributeError):
# definition might not exist or it might not be a cmdalias
pass
- cmdtable[aliasdef.name] = (aliasdef, aliasdef.opts, aliasdef.help)
+ source = ui.configsource('alias', alias)
+ entry = lazyaliasentry(alias, definition, cmdtable, source)
+ cmdtable[alias] = entry
def _parse(ui, args):
options = {}
@@ -603,20 +636,20 @@
The values are listed in the order they appear in args.
The options and values are removed from args.
- >>> args = ['x', '--cwd', 'foo', 'y']
- >>> _earlygetopt(['--cwd'], args), args
+ >>> args = [b'x', b'--cwd', b'foo', b'y']
+ >>> _earlygetopt([b'--cwd'], args), args
(['foo'], ['x', 'y'])
- >>> args = ['x', '--cwd=bar', 'y']
- >>> _earlygetopt(['--cwd'], args), args
+ >>> args = [b'x', b'--cwd=bar', b'y']
+ >>> _earlygetopt([b'--cwd'], args), args
(['bar'], ['x', 'y'])
- >>> args = ['x', '-R', 'foo', 'y']
- >>> _earlygetopt(['-R'], args), args
+ >>> args = [b'x', b'-R', b'foo', b'y']
+ >>> _earlygetopt([b'-R'], args), args
(['foo'], ['x', 'y'])
- >>> args = ['x', '-Rbar', 'y']
- >>> _earlygetopt(['-R'], args), args
+ >>> args = [b'x', b'-Rbar', b'y']
+ >>> _earlygetopt([b'-R'], args), args
(['bar'], ['x', 'y'])
"""
try:
@@ -676,7 +709,7 @@
wd = pycompat.getcwd()
except OSError as e:
raise error.Abort(_("error getting current working directory: %s") %
- e.strerror)
+ encoding.strtolocal(e.strerror))
path = cmdutil.findrepo(wd) or ""
if not path:
lui = ui
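
A Mercurial command table entry behaves like a (func, opts, help) 3-tuple, and lazyaliasentry above keeps that shape while deferring the expensive cmdalias construction until an element is first accessed. A toy version of the same pattern, with invented names:

    class lazyentry(object):
        def __init__(self, build):
            self._build = build          # callable returning (func, opts, help)
            self._value = None

        def _tuple(self):
            if self._value is None:
                self._value = self._build()
            return self._value

        def __getitem__(self, n):
            return self._tuple()[n]

        def __iter__(self):
            return iter(self._tuple())

        def __len__(self):
            return 3

    def dummycmd(ui):
        pass

    entry = lazyentry(lambda: (dummycmd, [], 'help text'))
    func, opts, helptext = entry         # unpacking triggers the build
    print(helptext)                      # help text
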
--- a/mercurial/encoding.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/encoding.py Sat Sep 30 07:52:48 2017 -0700
@@ -5,9 +5,9 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
-import array
+import io
import locale
import os
import unicodedata
@@ -18,6 +18,17 @@
pycompat,
)
+from .pure import (
+ charencode as charencodepure,
+)
+
+charencode = policy.importmod(r'charencode')
+
+isasciistr = charencode.isasciistr
+asciilower = charencode.asciilower
+asciiupper = charencode.asciiupper
+_jsonescapeu8fast = charencode.jsonescapeu8fast
+
_sysstr = pycompat.sysstr
if pycompat.ispy3:
@@ -73,11 +84,11 @@
encodingmode = environ.get("HGENCODINGMODE", "strict")
fallbackencoding = 'ISO-8859-1'
-class localstr(str):
+class localstr(bytes):
'''This class allows strings that are unmodified to be
round-tripped to the local encoding and back'''
def __new__(cls, u, l):
- s = str.__new__(cls, l)
+ s = bytes.__new__(cls, l)
s._utf8 = u
return s
def __hash__(self):
@@ -97,19 +108,19 @@
strings next to their local representation to allow lossless
round-trip conversion back to UTF-8.
- >>> u = 'foo: \\xc3\\xa4' # utf-8
+ >>> u = b'foo: \\xc3\\xa4' # utf-8
>>> l = tolocal(u)
>>> l
'foo: ?'
>>> fromlocal(l)
'foo: \\xc3\\xa4'
- >>> u2 = 'foo: \\xc3\\xa1'
+ >>> u2 = b'foo: \\xc3\\xa1'
>>> d = { l: 1, tolocal(u2): 2 }
>>> len(d) # no collision
2
- >>> 'foo: ?' in d
+ >>> b'foo: ?' in d
False
- >>> l1 = 'foo: \\xe4' # historical latin1 fallback
+ >>> l1 = b'foo: \\xe4' # historical latin1 fallback
>>> l = tolocal(l1)
>>> l
'foo: ?'
@@ -117,6 +128,9 @@
'foo: \\xc3\\xa4'
"""
+ if isasciistr(s):
+ return s
+
try:
try:
# make sure string is actually stored in UTF-8
@@ -159,6 +173,8 @@
# can we do a lossless round-trip?
if isinstance(s, localstr):
return s._utf8
+ if isasciistr(s):
+ return s
try:
u = s.decode(_sysstr(encoding), _sysstr(encodingmode))
@@ -231,60 +247,63 @@
If 'leftside' is True, left side of string 's' is trimmed.
'ellipsis' is always placed at trimmed side.
- >>> ellipsis = '+++'
+ >>> from .node import bin
+ >>> def bprint(s):
+ ... print(pycompat.sysstr(s))
+ >>> ellipsis = b'+++'
>>> from . import encoding
- >>> encoding.encoding = 'utf-8'
- >>> t= '1234567890'
- >>> print trim(t, 12, ellipsis=ellipsis)
+ >>> encoding.encoding = b'utf-8'
+ >>> t = b'1234567890'
+ >>> bprint(trim(t, 12, ellipsis=ellipsis))
1234567890
- >>> print trim(t, 10, ellipsis=ellipsis)
+ >>> bprint(trim(t, 10, ellipsis=ellipsis))
1234567890
- >>> print trim(t, 8, ellipsis=ellipsis)
+ >>> bprint(trim(t, 8, ellipsis=ellipsis))
12345+++
- >>> print trim(t, 8, ellipsis=ellipsis, leftside=True)
+ >>> bprint(trim(t, 8, ellipsis=ellipsis, leftside=True))
+++67890
- >>> print trim(t, 8)
+ >>> bprint(trim(t, 8))
12345678
- >>> print trim(t, 8, leftside=True)
+ >>> bprint(trim(t, 8, leftside=True))
34567890
- >>> print trim(t, 3, ellipsis=ellipsis)
+ >>> bprint(trim(t, 3, ellipsis=ellipsis))
+++
- >>> print trim(t, 1, ellipsis=ellipsis)
+ >>> bprint(trim(t, 1, ellipsis=ellipsis))
+
>>> u = u'\u3042\u3044\u3046\u3048\u304a' # 2 x 5 = 10 columns
- >>> t = u.encode(encoding.encoding)
- >>> print trim(t, 12, ellipsis=ellipsis)
+ >>> t = u.encode(pycompat.sysstr(encoding.encoding))
+ >>> bprint(trim(t, 12, ellipsis=ellipsis))
\xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a
- >>> print trim(t, 10, ellipsis=ellipsis)
+ >>> bprint(trim(t, 10, ellipsis=ellipsis))
\xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a
- >>> print trim(t, 8, ellipsis=ellipsis)
+ >>> bprint(trim(t, 8, ellipsis=ellipsis))
\xe3\x81\x82\xe3\x81\x84+++
- >>> print trim(t, 8, ellipsis=ellipsis, leftside=True)
+ >>> bprint(trim(t, 8, ellipsis=ellipsis, leftside=True))
+++\xe3\x81\x88\xe3\x81\x8a
- >>> print trim(t, 5)
+ >>> bprint(trim(t, 5))
\xe3\x81\x82\xe3\x81\x84
- >>> print trim(t, 5, leftside=True)
+ >>> bprint(trim(t, 5, leftside=True))
\xe3\x81\x88\xe3\x81\x8a
- >>> print trim(t, 4, ellipsis=ellipsis)
+ >>> bprint(trim(t, 4, ellipsis=ellipsis))
+++
- >>> print trim(t, 4, ellipsis=ellipsis, leftside=True)
+ >>> bprint(trim(t, 4, ellipsis=ellipsis, leftside=True))
+++
- >>> t = '\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa' # invalid byte sequence
- >>> print trim(t, 12, ellipsis=ellipsis)
+ >>> t = bin(b'112233445566778899aa') # invalid byte sequence
+ >>> bprint(trim(t, 12, ellipsis=ellipsis))
\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa
- >>> print trim(t, 10, ellipsis=ellipsis)
+ >>> bprint(trim(t, 10, ellipsis=ellipsis))
\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa
- >>> print trim(t, 8, ellipsis=ellipsis)
+ >>> bprint(trim(t, 8, ellipsis=ellipsis))
\x11\x22\x33\x44\x55+++
- >>> print trim(t, 8, ellipsis=ellipsis, leftside=True)
+ >>> bprint(trim(t, 8, ellipsis=ellipsis, leftside=True))
+++\x66\x77\x88\x99\xaa
- >>> print trim(t, 8)
+ >>> bprint(trim(t, 8))
\x11\x22\x33\x44\x55\x66\x77\x88
- >>> print trim(t, 8, leftside=True)
+ >>> bprint(trim(t, 8, leftside=True))
\x33\x44\x55\x66\x77\x88\x99\xaa
- >>> print trim(t, 3, ellipsis=ellipsis)
+ >>> bprint(trim(t, 3, ellipsis=ellipsis))
+++
- >>> print trim(t, 1, ellipsis=ellipsis)
+ >>> bprint(trim(t, 1, ellipsis=ellipsis))
+
"""
try:
@@ -318,38 +337,6 @@
return concat(usub.encode(_sysstr(encoding)))
return ellipsis # not enough room for multi-column characters
-def _asciilower(s):
- '''convert a string to lowercase if ASCII
-
- Raises UnicodeDecodeError if non-ASCII characters are found.'''
- s.decode('ascii')
- return s.lower()
-
-def asciilower(s):
- # delay importing avoids cyclic dependency around "parsers" in
- # pure Python build (util => i18n => encoding => parsers => util)
- parsers = policy.importmod(r'parsers')
- impl = getattr(parsers, 'asciilower', _asciilower)
- global asciilower
- asciilower = impl
- return impl(s)
-
-def _asciiupper(s):
- '''convert a string to uppercase if ASCII
-
- Raises UnicodeDecodeError if non-ASCII characters are found.'''
- s.decode('ascii')
- return s.upper()
-
-def asciiupper(s):
- # delay importing avoids cyclic dependency around "parsers" in
- # pure Python build (util => i18n => encoding => parsers => util)
- parsers = policy.importmod(r'parsers')
- impl = getattr(parsers, 'asciiupper', _asciiupper)
- global asciiupper
- asciiupper = impl
- return impl(s)
-
def lower(s):
"best-effort encoding-aware case-folding of local string s"
try:
@@ -409,22 +396,6 @@
upper = 1
other = 0
-_jsonmap = []
-_jsonmap.extend("\\u%04x" % x for x in range(32))
-_jsonmap.extend(pycompat.bytechr(x) for x in range(32, 127))
-_jsonmap.append('\\u007f')
-_jsonmap[0x09] = '\\t'
-_jsonmap[0x0a] = '\\n'
-_jsonmap[0x22] = '\\"'
-_jsonmap[0x5c] = '\\\\'
-_jsonmap[0x08] = '\\b'
-_jsonmap[0x0c] = '\\f'
-_jsonmap[0x0d] = '\\r'
-_paranoidjsonmap = _jsonmap[:]
-_paranoidjsonmap[0x3c] = '\\u003c' # '<' (e.g. escape "</script>")
-_paranoidjsonmap[0x3e] = '\\u003e' # '>'
-_jsonmap.extend(pycompat.bytechr(x) for x in range(128, 256))
-
def jsonescape(s, paranoid=False):
'''returns a string suitable for JSON
@@ -438,48 +409,51 @@
(escapes are doubled in these tests)
- >>> jsonescape('this is a test')
+ >>> jsonescape(b'this is a test')
'this is a test'
- >>> jsonescape('escape characters: \\0 \\x0b \\x7f')
+ >>> jsonescape(b'escape characters: \\0 \\x0b \\x7f')
'escape characters: \\\\u0000 \\\\u000b \\\\u007f'
- >>> jsonescape('escape characters: \\t \\n \\r \\" \\\\')
- 'escape characters: \\\\t \\\\n \\\\r \\\\" \\\\\\\\'
- >>> jsonescape('a weird byte: \\xdd')
+ >>> jsonescape(b'escape characters: \\b \\t \\n \\f \\r \\" \\\\')
+ 'escape characters: \\\\b \\\\t \\\\n \\\\f \\\\r \\\\" \\\\\\\\'
+ >>> jsonescape(b'a weird byte: \\xdd')
'a weird byte: \\xed\\xb3\\x9d'
- >>> jsonescape('utf-8: caf\\xc3\\xa9')
+ >>> jsonescape(b'utf-8: caf\\xc3\\xa9')
'utf-8: caf\\xc3\\xa9'
- >>> jsonescape('')
+ >>> jsonescape(b'')
''
If paranoid, non-ascii and common troublesome characters are also escaped.
This is suitable for web output.
- >>> jsonescape('escape boundary: \\x7e \\x7f \\xc2\\x80', paranoid=True)
+ >>> s = b'escape characters: \\0 \\x0b \\x7f'
+ >>> assert jsonescape(s) == jsonescape(s, paranoid=True)
+ >>> s = b'escape characters: \\b \\t \\n \\f \\r \\" \\\\'
+ >>> assert jsonescape(s) == jsonescape(s, paranoid=True)
+ >>> jsonescape(b'escape boundary: \\x7e \\x7f \\xc2\\x80', paranoid=True)
'escape boundary: ~ \\\\u007f \\\\u0080'
- >>> jsonescape('a weird byte: \\xdd', paranoid=True)
+ >>> jsonescape(b'a weird byte: \\xdd', paranoid=True)
'a weird byte: \\\\udcdd'
- >>> jsonescape('utf-8: caf\\xc3\\xa9', paranoid=True)
+ >>> jsonescape(b'utf-8: caf\\xc3\\xa9', paranoid=True)
'utf-8: caf\\\\u00e9'
- >>> jsonescape('non-BMP: \\xf0\\x9d\\x84\\x9e', paranoid=True)
+ >>> jsonescape(b'non-BMP: \\xf0\\x9d\\x84\\x9e', paranoid=True)
'non-BMP: \\\\ud834\\\\udd1e'
- >>> jsonescape('<foo@example.org>', paranoid=True)
+ >>> jsonescape(b'<foo@example.org>', paranoid=True)
'\\\\u003cfoo@example.org\\\\u003e'
'''
- if paranoid:
- jm = _paranoidjsonmap
- else:
- jm = _jsonmap
-
u8chars = toutf8b(s)
try:
- return ''.join(jm[x] for x in bytearray(u8chars)) # fast path
- except IndexError:
+ return _jsonescapeu8fast(u8chars, paranoid)
+ except ValueError:
pass
- # non-BMP char is represented as UTF-16 surrogate pair
- u16codes = array.array('H', u8chars.decode('utf-8').encode('utf-16'))
- u16codes.pop(0) # drop BOM
- return ''.join(jm[x] if x < 128 else '\\u%04x' % x for x in u16codes)
+ return charencodepure.jsonescapeu8fallback(u8chars, paranoid)
+
+# We need to decode/encode U+DCxx codes transparently since invalid UTF-8
+# bytes are mapped to that range.
+if pycompat.ispy3:
+ _utf8strict = r'surrogatepass'
+else:
+ _utf8strict = r'strict'
_utf8len = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 3, 4]
@@ -491,13 +465,13 @@
'''
# find how many bytes to attempt decoding from first nibble
- l = _utf8len[ord(s[pos]) >> 4]
+ l = _utf8len[ord(s[pos:pos + 1]) >> 4]
if not l: # ascii
- return s[pos]
+ return s[pos:pos + 1]
c = s[pos:pos + l]
# validate with attempted decode
- c.decode("utf-8")
+ c.decode("utf-8", _utf8strict)
return c
def toutf8b(s):
@@ -530,15 +504,18 @@
internal surrogate encoding as a UTF-8 string.)
'''
+ if not isinstance(s, localstr) and isasciistr(s):
+ return s
if "\xed" not in s:
if isinstance(s, localstr):
return s._utf8
try:
- s.decode('utf-8')
+ s.decode('utf-8', _utf8strict)
return s
except UnicodeDecodeError:
pass
+ s = pycompat.bytestr(s)
r = ""
pos = 0
l = len(s)
@@ -547,12 +524,12 @@
c = getutf8char(s, pos)
if "\xed\xb0\x80" <= c <= "\xed\xb3\xbf":
# have to re-escape existing U+DCxx characters
- c = unichr(0xdc00 + ord(s[pos])).encode('utf-8')
+ c = unichr(0xdc00 + ord(s[pos])).encode('utf-8', _utf8strict)
pos += 1
else:
pos += len(c)
except UnicodeDecodeError:
- c = unichr(0xdc00 + ord(s[pos])).encode('utf-8')
+ c = unichr(0xdc00 + ord(s[pos])).encode('utf-8', _utf8strict)
pos += 1
r += c
return r
@@ -565,21 +542,23 @@
that was passed through tolocal will remain in UTF-8.
>>> roundtrip = lambda x: fromutf8b(toutf8b(x)) == x
- >>> m = "\\xc3\\xa9\\x99abcd"
+ >>> m = b"\\xc3\\xa9\\x99abcd"
>>> toutf8b(m)
'\\xc3\\xa9\\xed\\xb2\\x99abcd'
>>> roundtrip(m)
True
- >>> roundtrip("\\xc2\\xc2\\x80")
+ >>> roundtrip(b"\\xc2\\xc2\\x80")
True
- >>> roundtrip("\\xef\\xbf\\xbd")
+ >>> roundtrip(b"\\xef\\xbf\\xbd")
True
- >>> roundtrip("\\xef\\xef\\xbf\\xbd")
+ >>> roundtrip(b"\\xef\\xef\\xbf\\xbd")
True
- >>> roundtrip("\\xf1\\x80\\x80\\x80\\x80")
+ >>> roundtrip(b"\\xf1\\x80\\x80\\x80\\x80")
True
'''
+ if isasciistr(s):
+ return s
# fast path - look for uDxxx prefixes in s
if "\xed" not in s:
return s
@@ -589,6 +568,7 @@
# points to be escaped. Instead, we use our handy getutf8char
# helper again to walk the string without "decoding" it.
+ s = pycompat.bytestr(s)
r = ""
pos = 0
l = len(s)
@@ -597,6 +577,21 @@
pos += len(c)
# unescape U+DCxx characters
if "\xed\xb0\x80" <= c <= "\xed\xb3\xbf":
- c = chr(ord(c.decode("utf-8")) & 0xff)
+ c = pycompat.bytechr(ord(c.decode("utf-8", _utf8strict)) & 0xff)
r += c
return r
+
+if pycompat.ispy3:
+ class strio(io.TextIOWrapper):
+ """Wrapper around TextIOWrapper that respects hg's encoding assumptions.
+
+ Also works around Python closing streams.
+ """
+
+ def __init__(self, buffer):
+ super(strio, self).__init__(buffer, encoding=_sysstr(encoding))
+
+ def __del__(self):
+ """Override __del__ so it doesn't close the underlying stream."""
+else:
+ strio = pycompat.identity
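
The _utf8strict switch above relies on the U+DCxx convention: an invalid byte 0xNN is carried through as code point U+DC00+0xNN, which only round-trips on Python 3 if the codec is told to pass surrogates. A short Python 3 illustration of that mapping (matching the '\xdd' doctest above):

    bad = 0xdd
    escaped = chr(0xdc00 + bad).encode('utf-8', 'surrogatepass')
    print(escaped)                       # b'\xed\xb3\x9d'
    restored = ord(escaped.decode('utf-8', 'surrogatepass')) & 0xff
    assert restored == bad
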
--- a/mercurial/error.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/error.py Sat Sep 30 07:52:48 2017 -0700
@@ -115,6 +115,9 @@
"""Raised when parsing config files and {rev,file}sets (msg[, pos])"""
__bytes__ = _tobytes
+class PatchError(Exception):
+ __bytes__ = _tobytes
+
class UnknownIdentifier(ParseError):
"""Exception raised when a {rev,file}set references an unknown identifier"""
--- a/mercurial/exchange.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/exchange.py Sat Sep 30 07:52:48 2017 -0700
@@ -7,6 +7,7 @@
from __future__ import absolute_import
+import collections
import errno
import hashlib
@@ -294,7 +295,7 @@
"""
def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
- bookmarks=()):
+ bookmarks=(), pushvars=None):
# repo we push from
self.repo = repo
self.ui = repo.ui
@@ -308,8 +309,6 @@
self.bookmarks = bookmarks
# allow push of new branch
self.newbranch = newbranch
- # did a local lock get acquired?
- self.locallocked = None
# step already performed
# (used to check what steps have been already performed through bundle2)
self.stepsdone = set()
@@ -354,6 +353,8 @@
# map { pushkey partid -> callback handling failure}
# used to handle exception from mandatory pushkey part failure
self.pkfailcb = {}
+ # an iterable of pushvars or None
+ self.pushvars = pushvars
@util.propertycache
def futureheads(self):
@@ -423,7 +424,7 @@
if opargs is None:
opargs = {}
pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
- **opargs)
+ **pycompat.strkwargs(opargs))
if pushop.remote.local():
missing = (set(pushop.repo.requirements)
- pushop.remote.local().supported)
@@ -433,28 +434,26 @@
" %s") % (', '.join(sorted(missing)))
raise error.Abort(msg)
- # there are two ways to push to remote repo:
- #
- # addchangegroup assumes local user can lock remote
- # repo (local filesystem, old ssh servers).
- #
- # unbundle assumes local user cannot lock remote repo (new ssh
- # servers, http servers).
-
if not pushop.remote.canpush():
raise error.Abort(_("destination does not support push"))
- # get local lock as we might write phase data
- localwlock = locallock = None
+
+ if not pushop.remote.capable('unbundle'):
+ raise error.Abort(_('cannot push: destination does not support the '
+ 'unbundle wire protocol command'))
+
+ # get lock as we might write phase data
+ wlock = lock = None
try:
# bundle2 push may receive a reply bundle touching bookmarks or other
# things requiring the wlock. Take it now to ensure proper ordering.
maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
if (not _forcebundle1(pushop)) and maypushback:
- localwlock = pushop.repo.wlock()
- locallock = pushop.repo.lock()
- pushop.locallocked = True
+ wlock = pushop.repo.wlock()
+ lock = pushop.repo.lock()
+ pushop.trmanager = transactionmanager(pushop.repo,
+ 'push-response',
+ pushop.remote.url())
except IOError as err:
- pushop.locallocked = False
if err.errno != errno.EACCES:
raise
# source repo cannot be locked.
@@ -462,36 +461,18 @@
# synchronisation.
msg = 'cannot lock source repository: %s\n' % err
pushop.ui.debug(msg)
- try:
- if pushop.locallocked:
- pushop.trmanager = transactionmanager(pushop.repo,
- 'push-response',
- pushop.remote.url())
+
+ with wlock or util.nullcontextmanager(), \
+ lock or util.nullcontextmanager(), \
+ pushop.trmanager or util.nullcontextmanager():
pushop.repo.checkpush(pushop)
- lock = None
- unbundle = pushop.remote.capable('unbundle')
- if not unbundle:
- lock = pushop.remote.lock()
- try:
- _pushdiscovery(pushop)
- if not _forcebundle1(pushop):
- _pushbundle2(pushop)
- _pushchangeset(pushop)
- _pushsyncphase(pushop)
- _pushobsolete(pushop)
- _pushbookmark(pushop)
- finally:
- if lock is not None:
- lock.release()
- if pushop.trmanager:
- pushop.trmanager.close()
- finally:
- if pushop.trmanager:
- pushop.trmanager.release()
- if locallock is not None:
- locallock.release()
- if localwlock is not None:
- localwlock.release()
+ _pushdiscovery(pushop)
+ if not _forcebundle1(pushop):
+ _pushbundle2(pushop)
+ _pushchangeset(pushop)
+ _pushsyncphase(pushop)
+ _pushobsolete(pushop)
+ _pushbookmark(pushop)
return pushop
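
The rewritten locking above pairs optionally-acquired locks with util.nullcontextmanager() so a single with-statement covers both the locked and unlocked cases. The general pattern, sketched with a local stand-in for the null context manager:

    import contextlib

    @contextlib.contextmanager
    def nullcontext():
        # does nothing on enter or exit, like util.nullcontextmanager()
        yield

    lock = None                   # pretend the lock could not be acquired
    with lock or nullcontext():
        pass                      # the body runs whether or not a lock is held
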
@@ -677,9 +658,11 @@
if unfi.obsstore:
# these messages are here for the 80 char limit reason
mso = _("push includes obsolete changeset: %s!")
- mst = {"unstable": _("push includes unstable changeset: %s!"),
- "bumped": _("push includes bumped changeset: %s!"),
- "divergent": _("push includes divergent changeset: %s!")}
+ mspd = _("push includes phase-divergent changeset: %s!")
+ mscd = _("push includes content-divergent changeset: %s!")
+ mst = {"orphan": _("push includes orphan changeset: %s!"),
+ "phase-divergent": mspd,
+ "content-divergent": mscd}
# If we are to push if there is at least one
# obsolete or unstable changeset in missing, at
# least one of the missinghead will be obsolete or
@@ -688,8 +671,10 @@
ctx = unfi[node]
if ctx.obsolete():
raise error.Abort(mso % ctx)
- elif ctx.troubled():
- raise error.Abort(mst[ctx.troubles()[0]] % ctx)
+ elif ctx.isunstable():
+ # TODO print more than one instability in the abort
+ # message
+ raise error.Abort(mst[ctx.instabilities()[0]] % ctx)
discovery.checkheads(pushop)
return True
@@ -771,10 +756,9 @@
if not cgversions:
raise ValueError(_('no common changegroup version'))
version = max(cgversions)
- cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
- pushop.outgoing,
- version=version)
- cgpart = bundler.newpart('changegroup', data=cg)
+ cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
+ 'push')
+ cgpart = bundler.newpart('changegroup', data=cgstream)
if cgversions:
cgpart.addparam('version', version)
if 'treemanifest' in pushop.repo.requirements:
@@ -808,8 +792,8 @@
part = bundler.newpart('pushkey')
part.addparam('namespace', enc('phases'))
part.addparam('key', enc(newremotehead.hex()))
- part.addparam('old', enc(str(phases.draft)))
- part.addparam('new', enc(str(phases.public)))
+ part.addparam('old', enc('%d' % phases.draft))
+ part.addparam('new', enc('%d' % phases.public))
part2node.append((part.id, newremotehead))
pushop.pkfailcb[part.id] = handlefailure
@@ -891,6 +875,24 @@
pushop.bkresult = 1
return handlereply
+@b2partsgenerator('pushvars', idx=0)
+def _getbundlesendvars(pushop, bundler):
+ '''send shellvars via bundle2'''
+ pushvars = pushop.pushvars
+ if pushvars:
+ shellvars = {}
+ for raw in pushvars:
+ if '=' not in raw:
+ msg = ("unable to parse variable '%s', should follow "
+ "'KEY=VALUE' or 'KEY=' format")
+ raise error.Abort(msg % raw)
+ k, v = raw.split('=', 1)
+ shellvars[k] = v
+
+ part = bundler.newpart('pushvars')
+
+ for key, value in shellvars.iteritems():
+ part.addparam(key, value, mandatory=False)
def _pushbundle2(pushop):
"""push data to the remote using bundle2
@@ -948,9 +950,12 @@
pushop.stepsdone.add('changesets')
if not _pushcheckoutgoing(pushop):
return
+
+ # Should have verified this in push().
+ assert pushop.remote.capable('unbundle')
+
pushop.repo.prepushoutgoinghooks(pushop)
outgoing = pushop.outgoing
- unbundle = pushop.remote.capable('unbundle')
# TODO: get bundlecaps from remote
bundlecaps = None
# create a changegroup from local
@@ -958,35 +963,25 @@
or pushop.repo.changelog.filteredrevs):
# push everything,
# use the fast path, no race possible on push
- bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
- cg = changegroup.getsubset(pushop.repo,
- outgoing,
- bundler,
- 'push',
- fastpath=True)
+ cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
+ fastpath=True, bundlecaps=bundlecaps)
else:
- cg = changegroup.getchangegroup(pushop.repo, 'push', outgoing,
- bundlecaps=bundlecaps)
+ cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
+ 'push', bundlecaps=bundlecaps)
# apply changegroup to remote
- if unbundle:
- # local repo finds heads on server, finds out what
- # revs it must push. once revs transferred, if server
- # finds it has different heads (someone else won
- # commit/push race), server aborts.
- if pushop.force:
- remoteheads = ['force']
- else:
- remoteheads = pushop.remoteheads
- # ssh: return remote's addchangegroup()
- # http: return remote's addchangegroup() or 0 for error
- pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
- pushop.repo.url())
+ # local repo finds heads on server, finds out what
+ # revs it must push. once revs transferred, if server
+ # finds it has different heads (someone else won
+ # commit/push race), server aborts.
+ if pushop.force:
+ remoteheads = ['force']
else:
- # we return an integer indicating remote head count
- # change
- pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
- pushop.repo.url())
+ remoteheads = pushop.remoteheads
+ # ssh: return remote's addchangegroup()
+ # http: return remote's addchangegroup() or 0 for error
+ pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
+ pushop.repo.url())
def _pushsyncphase(pushop):
"""synchronise phase information locally and remotely"""
@@ -1173,7 +1168,7 @@
# deprecated; talk to trmanager directly
return self.trmanager.transaction()
-class transactionmanager(object):
+class transactionmanager(util.transactional):
"""An object to manage the life cycle of a transaction
It creates the transaction on demand and calls the appropriate hooks when
@@ -1229,8 +1224,10 @@
opargs = {}
pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
streamclonerequested=streamclonerequested, **opargs)
- if pullop.remote.local():
- missing = set(pullop.remote.requirements) - pullop.repo.supported
+
+ peerlocal = pullop.remote.local()
+ if peerlocal:
+ missing = set(peerlocal.requirements) - pullop.repo.supported
if missing:
msg = _("required features are not"
" supported in the destination:"
@@ -1242,10 +1239,10 @@
wlock = pullop.repo.wlock()
lock = pullop.repo.lock()
pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
- streamclone.maybeperformlegacystreamclone(pullop)
# This should ideally be in _pullbundle2(). However, it needs to run
# before discovery to avoid extra work.
_maybeapplyclonebundle(pullop)
+ streamclone.maybeperformlegacystreamclone(pullop)
_pulldiscovery(pullop)
if pullop.canusebundle2:
_pullbundle2(pullop)
@@ -1317,8 +1314,7 @@
common, fetch, rheads = tmp
nm = pullop.repo.unfiltered().changelog.nodemap
if fetch and rheads:
- # If a remote heads in filtered locally, lets drop it from the unknown
- # remote heads and put in back in common.
+ # If a remote head is filtered locally, put it back in common.
#
# This is a hackish solution to catch most of "common but locally
# hidden situation". We do not performs discovery on unfiltered
@@ -1328,16 +1324,12 @@
# If a set of such "common but filtered" changeset exist on the server
# but are not including a remote heads, we'll not be able to detect it,
scommon = set(common)
- filteredrheads = []
for n in rheads:
if n in nm:
if n not in scommon:
common.append(n)
- else:
- filteredrheads.append(n)
- if not filteredrheads:
+ if set(rheads).issubset(set(common)):
fetch = []
- rheads = filteredrheads
pullop.common = common
pullop.fetch = fetch
pullop.rheads = rheads
@@ -1358,12 +1350,21 @@
kwargs['common'] = pullop.common
kwargs['heads'] = pullop.heads or pullop.rheads
kwargs['cg'] = pullop.fetch
+
+ ui = pullop.repo.ui
+ legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
+ hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
+ if (not legacyphase and hasbinaryphase):
+ kwargs['phases'] = True
+ pullop.stepsdone.add('phases')
+
if 'listkeys' in pullop.remotebundle2caps:
- kwargs['listkeys'] = ['phases']
+ if 'phases' not in pullop.stepsdone:
+ kwargs['listkeys'] = ['phases']
if pullop.remotebookmarks is None:
# make sure to always includes bookmark data when migrating
# `hg incoming --bundle` to using this function.
- kwargs['listkeys'].append('bookmarks')
+ kwargs.setdefault('listkeys', []).append('bookmarks')
# If this is a full pull / clone and the server supports the clone bundles
# feature, tell the server whether we attempted a clone bundle. The
@@ -1595,8 +1596,8 @@
raise ValueError(_('unsupported getbundle arguments: %s')
% ', '.join(sorted(kwargs.keys())))
outgoing = _computeoutgoing(repo, heads, common)
- bundler = changegroup.getbundler('01', repo, bundlecaps)
- return changegroup.getsubsetraw(repo, outgoing, bundler, source)
+ return changegroup.makestream(repo, outgoing, '01', source,
+ bundlecaps=bundlecaps)
# bundle20 case
b2caps = {}
@@ -1620,7 +1621,7 @@
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
b2caps=None, heads=None, common=None, **kwargs):
"""add a changegroup part to the requested bundle"""
- cg = None
+ cgstream = None
if kwargs.get('cg', True):
# build changegroup bundle here.
version = '01'
@@ -1632,15 +1633,16 @@
raise ValueError(_('no common changegroup version'))
version = max(cgversions)
outgoing = _computeoutgoing(repo, heads, common)
- cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
- bundlecaps=bundlecaps,
- version=version)
+ if outgoing.missing:
+ cgstream = changegroup.makestream(repo, outgoing, version, source,
+ bundlecaps=bundlecaps)
- if cg:
- part = bundler.newpart('changegroup', data=cg)
+ if cgstream:
+ part = bundler.newpart('changegroup', data=cgstream)
if cgversions:
part.addparam('version', version)
- part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
+ part.addparam('nbchanges', '%d' % len(outgoing.missing),
+ mandatory=False)
if 'treemanifest' in repo.requirements:
part.addparam('treemanifest', '1')
@@ -1667,6 +1669,53 @@
markers = sorted(markers)
bundle2.buildobsmarkerspart(bundler, markers)
+@getbundle2partsgenerator('phases')
+def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
+ b2caps=None, heads=None, **kwargs):
+ """add phase heads part to the requested bundle"""
+ if kwargs.get('phases', False):
+ if not 'heads' in b2caps.get('phases'):
+ raise ValueError(_('no common phases exchange method'))
+ if heads is None:
+ heads = repo.heads()
+
+ headsbyphase = collections.defaultdict(set)
+ if repo.publishing():
+ headsbyphase[phases.public] = heads
+ else:
+ # find the appropriate heads to move
+
+ phase = repo._phasecache.phase
+ node = repo.changelog.node
+ rev = repo.changelog.rev
+ for h in heads:
+ headsbyphase[phase(repo, rev(h))].add(h)
+ seenphases = list(headsbyphase.keys())
+
+ # We do not handle anything but public and draft phases for now
+ if seenphases:
+ assert max(seenphases) <= phases.draft
+
+ # if client is pulling non-public changesets, we need to find
+ # intermediate public heads.
+ draftheads = headsbyphase.get(phases.draft, set())
+ if draftheads:
+ publicheads = headsbyphase.get(phases.public, set())
+
+ revset = 'heads(only(%ln, %ln) and public())'
+ extraheads = repo.revs(revset, draftheads, publicheads)
+ for r in extraheads:
+ headsbyphase[phases.public].add(node(r))
+
+ # transform data in a format used by the encoding function
+ phasemapping = []
+ for phase in phases.allphases:
+ phasemapping.append(sorted(headsbyphase[phase]))
+
+ # generate the actual part
+ phasedata = phases.binaryencode(phasemapping)
+ bundler.newpart('phase-heads', data=phasedata)
+
@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
b2caps=None, heads=None, common=None,
@@ -1816,7 +1865,9 @@
'falling back to regular clone\n'))
return
- entries = filterclonebundleentries(repo, entries)
+ entries = filterclonebundleentries(
+ repo, entries, streamclonerequested=pullop.streamclonerequested)
+
if not entries:
# There is a thundering herd concern here. However, if a server
# operator doesn't advertise bundles appropriate for its clients,
@@ -1885,7 +1936,7 @@
return m
-def filterclonebundleentries(repo, entries):
+def filterclonebundleentries(repo, entries, streamclonerequested=False):
"""Remove incompatible clone bundle manifest entries.
Accepts a list of entries parsed with ``parseclonebundlesmanifest``
@@ -1900,7 +1951,15 @@
spec = entry.get('BUNDLESPEC')
if spec:
try:
- parsebundlespec(repo, spec, strict=True)
+ comp, version, params = parsebundlespec(repo, spec, strict=True)
+
+ # If a stream clone was requested, filter out non-streamclone
+ # entries.
+ if streamclonerequested and (comp != 'UN' or version != 's1'):
+ repo.ui.debug('filtering %s because not a stream clone\n' %
+ entry['URL'])
+ continue
+
except error.InvalidBundleSpecification as e:
repo.ui.debug(str(e) + '\n')
continue
@@ -1908,6 +1967,12 @@
repo.ui.debug('filtering %s because unsupported bundle '
'spec: %s\n' % (entry['URL'], str(e)))
continue
+ # If we don't have a spec and requested a stream clone, we don't know
+        # what the entry is, so don't attempt to apply it.
+ elif streamclonerequested:
+ repo.ui.debug('filtering %s because cannot determine if a stream '
+ 'clone bundle\n' % entry['URL'])
+ continue
if 'REQUIRESNI' in entry and not sslutil.hassni:
repo.ui.debug('filtering %s because SNI not supported\n' %
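
The stream-clone filtering introduced above boils down to one rule: keep an
entry only when its bundlespec parses to uncompressed ('UN') content in the
's1' stream format, and drop entries whose spec is missing or unusable. A
minimal sketch of that rule on already-parsed (url, comp, version) tuples,
where comp and version stand in for two of the values returned by
parsebundlespec (the tuples and URLs are made up for illustration)::

    def filterforstreamclone(entries):
        """Return the URLs of entries usable for a stream clone."""
        kept = []
        for url, comp, version in entries:
            if comp is None or version is None:
                # no BUNDLESPEC: we cannot tell what this is, skip it
                continue
            if comp != 'UN' or version != 's1':
                continue
            kept.append(url)
        return kept

    entries = [
        ('https://cdn.example/full.hg', 'BZ', '02'),     # regular bundle
        ('https://cdn.example/stream.hg', 'UN', 's1'),   # stream clone bundle
        ('https://cdn.example/mystery.hg', None, None),  # no spec advertised
    ]
    print(filterforstreamclone(entries))
    # ['https://cdn.example/stream.hg']
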
--- a/mercurial/extensions.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/extensions.py Sat Sep 30 07:52:48 2017 -0700
@@ -7,6 +7,7 @@
from __future__ import absolute_import
+import functools
import imp
import inspect
import os
@@ -19,7 +20,6 @@
from . import (
cmdutil,
configitems,
- encoding,
error,
pycompat,
util,
@@ -114,16 +114,11 @@
mod = _importh(name)
return mod
-def _forbytes(inst):
- """Portably format an import error into a form suitable for
- %-formatting into bytestrings."""
- return encoding.strtolocal(str(inst))
-
def _reportimporterror(ui, err, failed, next):
# note: this ui.debug happens before --debug is processed,
# Use --config ui.debug=1 to see them.
ui.debug('could not import %s (%s): trying %s\n'
- % (failed, _forbytes(err), next))
+ % (failed, util.forcebytestr(err), next))
if ui.debugflag:
ui.traceback()
@@ -139,6 +134,14 @@
"registrar.command to register '%s'" % c, '4.6')
missing = [a for a in _cmdfuncattrs if not util.safehasattr(f, a)]
if not missing:
+ for option in e[1]:
+ default = option[2]
+ if isinstance(default, type(u'')):
+ raise error.ProgrammingError(
+ "option '%s.%s' has a unicode default value"
+ % (c, option[1]),
+ hint=("change the %s.%s default value to a "
+ "non-unicode string" % (c, option[1])))
continue
raise error.ProgrammingError(
'missing attributes: %s' % ', '.join(missing),
@@ -180,7 +183,7 @@
uisetup(ui)
except Exception as inst:
ui.traceback()
- msg = _forbytes(inst)
+ msg = util.forcebytestr(inst)
ui.warn(_("*** failed to set up extension %s: %s\n") % (name, msg))
return False
return True
@@ -192,12 +195,16 @@
try:
extsetup(ui)
except TypeError:
- if inspect.getargspec(extsetup).args:
+ # Try to use getfullargspec (Python 3) first, and fall
+ # back to getargspec only if it doesn't exist so as to
+ # avoid warnings.
+ if getattr(inspect, 'getfullargspec',
+ getattr(inspect, 'getargspec'))(extsetup).args:
raise
extsetup() # old extsetup with no ui argument
except Exception as inst:
ui.traceback()
- msg = _forbytes(inst)
+ msg = util.forcebytestr(inst)
ui.warn(_("*** failed to set up extension %s: %s\n") % (name, msg))
return False
return True
@@ -215,7 +222,7 @@
try:
load(ui, name, path)
except Exception as inst:
- msg = _forbytes(inst)
+ msg = util.forcebytestr(inst)
if path:
ui.warn(_("*** failed to import extension %s from %s: %s\n")
% (name, path, msg))
@@ -225,6 +232,18 @@
if isinstance(inst, error.Hint) and inst.hint:
ui.warn(_("*** (%s)\n") % inst.hint)
ui.traceback()
+    # list of (objname, loadermod, loadername) tuples:
+    # - objname is the name of an object in the extension module,
+    #   from which extra information is loaded
+    # - loadermod is the module where the loader is placed
+    # - loadername is the name of the function,
+    #   which takes (ui, extensionname, extraobj) arguments
+    #
+    # This one is for the list of items that must be run before running any setup
+ earlyextraloaders = [
+ ('configtable', configitems, 'loadconfigtable'),
+ ]
+ _loadextra(ui, newindex, earlyextraloaders)
broken = set()
for name in _order[newindex:]:
@@ -256,6 +275,7 @@
from . import (
color,
commands,
+ filemerge,
fileset,
revset,
templatefilters,
@@ -272,14 +292,16 @@
extraloaders = [
('cmdtable', commands, 'loadcmdtable'),
('colortable', color, 'loadcolortable'),
- ('configtable', configitems, 'loadconfigtable'),
('filesetpredicate', fileset, 'loadpredicate'),
+ ('internalmerge', filemerge, 'loadinternalmerge'),
('revsetpredicate', revset, 'loadpredicate'),
('templatefilter', templatefilters, 'loadfilter'),
('templatefunc', templater, 'loadfunction'),
('templatekeyword', templatekw, 'loadkeyword'),
]
+ _loadextra(ui, newindex, extraloaders)
+def _loadextra(ui, newindex, extraloaders):
for name in _order[newindex:]:
module = _extensions[name]
if not module:
@@ -324,6 +346,10 @@
def _updatewrapper(wrap, origfn, unboundwrapper):
'''Copy and add some useful attributes to wrapper'''
+ try:
+ wrap.__name__ = origfn.__name__
+ except AttributeError:
+ pass
wrap.__module__ = getattr(origfn, '__module__')
wrap.__doc__ = getattr(origfn, '__doc__')
wrap.__dict__.update(getattr(origfn, '__dict__', {}))
@@ -367,7 +393,8 @@
break
origfn = entry[0]
- wrap = bind(util.checksignature(wrapper), util.checksignature(origfn))
+ wrap = functools.partial(util.checksignature(wrapper),
+ util.checksignature(origfn))
_updatewrapper(wrap, origfn, wrapper)
if docstring is not None:
wrap.__doc__ += docstring
@@ -384,6 +411,7 @@
These can't be wrapped using the normal wrapfunction.
"""
+ propname = pycompat.sysstr(propname)
assert callable(wrapper)
for currcls in cls.__mro__:
if propname in currcls.__dict__:
@@ -395,8 +423,23 @@
break
if currcls is object:
- raise AttributeError(
- _("type '%s' has no property '%s'") % (cls, propname))
+ raise AttributeError(r"type '%s' has no property '%s'" % (
+ cls, propname))
+
+class wrappedfunction(object):
+ '''context manager for temporarily wrapping a function'''
+
+ def __init__(self, container, funcname, wrapper):
+ assert callable(wrapper)
+ self._container = container
+ self._funcname = funcname
+ self._wrapper = wrapper
+
+ def __enter__(self):
+ wrapfunction(self._container, self._funcname, self._wrapper)
+
+ def __exit__(self, exctype, excvalue, traceback):
+ unwrapfunction(self._container, self._funcname, self._wrapper)
def wrapfunction(container, funcname, wrapper):
'''Wrap the function named funcname in container
@@ -435,7 +478,14 @@
origfn = getattr(container, funcname)
assert callable(origfn)
- wrap = bind(wrapper, origfn)
+ if inspect.ismodule(container):
+ # origfn is not an instance or class method. "partial" can be used.
+ # "partial" won't insert a frame in traceback.
+ wrap = functools.partial(wrapper, origfn)
+ else:
+ # "partial" cannot be safely used. Emulate its effect by using "bind".
+ # The downside is one more frame in traceback.
+ wrap = bind(wrapper, origfn)
_updatewrapper(wrap, origfn, wrapper)
setattr(container, funcname, wrap)
return origfn
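
Both the functools.partial path above and the new wrappedfunction context
manager rely on the same convention: the wrapper receives the original
callable as its first argument, and unwrapping is simply restoring the
original attribute. A self-contained sketch of that pattern, with
types.SimpleNamespace standing in for the module or class being patched::

    import functools
    import types

    def greet(name):
        return 'hello %s' % name

    def loudwrapper(orig, name):
        # a wrapper gets the original callable first, then its arguments
        return orig(name).upper()

    container = types.SimpleNamespace(greet=greet)

    # what wrapfunction() effectively does for module-level functions
    orig = container.greet
    container.greet = functools.partial(loudwrapper, orig)
    print(container.greet('world'))   # HELLO WORLD

    # and what wrappedfunction() adds: restore the original afterwards
    container.greet = orig
    print(container.greet('world'))   # hello world
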
--- a/mercurial/filelog.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/filelog.py Sat Sep 30 07:52:48 2017 -0700
@@ -31,7 +31,7 @@
return meta, (s + 2)
def packmeta(meta, text):
- keys = sorted(meta.iterkeys())
+ keys = sorted(meta)
metatext = "".join("%s: %s\n" % (k, meta[k]) for k in keys)
return "\1\n%s\1\n%s" % (metatext, text)
--- a/mercurial/filemerge.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/filemerge.py Sat Sep 30 07:52:48 2017 -0700
@@ -21,6 +21,7 @@
formatter,
match,
pycompat,
+ registrar,
scmutil,
simplemerge,
tagmerge,
@@ -44,10 +45,12 @@
# Merge tools to document.
internalsdoc = {}
+internaltool = registrar.internalmerge()
+
# internal tool merge types
-nomerge = None
-mergeonly = 'mergeonly' # just the full merge, no premerge
-fullmerge = 'fullmerge' # both premerge and merge
+nomerge = internaltool.nomerge
+mergeonly = internaltool.mergeonly # just the full merge, no premerge
+fullmerge = internaltool.fullmerge # both premerge and merge
_localchangedotherdeletedmsg = _(
"local%(l)s changed %(fd)s which other%(o)s deleted\n"
@@ -104,21 +107,6 @@
def isabsent(self):
return True
-def internaltool(name, mergetype, onfailure=None, precheck=None):
- '''return a decorator for populating internal merge tool table'''
- def decorator(func):
- fullname = ':' + name
- func.__doc__ = (pycompat.sysstr("``%s``\n" % fullname)
- + func.__doc__.strip())
- internals[fullname] = func
- internals['internal:' + name] = func
- internalsdoc[fullname] = func
- func.mergetype = mergetype
- func.onfailure = onfailure
- func.precheck = precheck
- return func
- return decorator
-
def _findtool(ui, tool):
if tool in internals:
return tool
@@ -330,7 +318,7 @@
tool, toolpath, binary, symlink = toolconf
if symlink or fcd.isabsent() or fco.isabsent():
return 1
- a, b, c, back = files
+ unused, unused, unused, back = files
ui = repo.ui
@@ -353,12 +341,13 @@
labels = _defaultconflictlabels
if len(labels) < 3:
labels.append('base')
- r = simplemerge.simplemerge(ui, a, b, c, quiet=True, label=labels)
+ r = simplemerge.simplemerge(ui, fcd, fca, fco, quiet=True, label=labels)
if not r:
ui.debug(" premerge successful\n")
return 0
if premerge not in validkeep:
- util.copyfile(back, a) # restore from backup and try again
+ # restore from backup and try again
+ _restorebackup(fcd, back)
return 1 # continue merging
def _mergecheck(repo, mynode, orig, fcd, fco, fca, toolconf):
@@ -379,11 +368,9 @@
files. It will fail if there are any conflicts and leave markers in
the partially merged file. Markers will have two sections, one for each side
of merge, unless mode equals 'union' which suppresses the markers."""
- a, b, c, back = files
-
ui = repo.ui
- r = simplemerge.simplemerge(ui, a, b, c, label=labels, mode=mode)
+ r = simplemerge.simplemerge(ui, fcd, fca, fco, label=labels, mode=mode)
return True, r, False
@internaltool('union', fullmerge,
@@ -434,8 +421,7 @@
"""
assert localorother is not None
tool, toolpath, binary, symlink = toolconf
- a, b, c, back = files
- r = simplemerge.simplemerge(repo.ui, a, b, c, label=labels,
+ r = simplemerge.simplemerge(repo.ui, fcd, fca, fco, label=labels,
localorother=localorother)
return True, r
@@ -479,11 +465,10 @@
    This implies premerge. Therefore, files aren't dumped, if premerge
runs successfully. Use :forcedump to forcibly write files out.
"""
- a, b, c, back = files
-
+ a = _workingpath(repo, fcd)
fd = fcd.path()
- util.copyfile(a, a + ".local")
+ util.writefile(a + ".local", fcd.decodeddata())
repo.wwrite(fd + ".other", fco.data(), fco.flags())
repo.wwrite(fd + ".base", fca.data(), fca.flags())
return False, 1, False
@@ -503,33 +488,38 @@
repo.ui.warn(_('warning: %s cannot merge change/delete conflict '
'for %s\n') % (tool, fcd.path()))
return False, 1, None
- a, b, c, back = files
- out = ""
- env = {'HG_FILE': fcd.path(),
- 'HG_MY_NODE': short(mynode),
- 'HG_OTHER_NODE': str(fco.changectx()),
- 'HG_BASE_NODE': str(fca.changectx()),
- 'HG_MY_ISLINK': 'l' in fcd.flags(),
- 'HG_OTHER_ISLINK': 'l' in fco.flags(),
- 'HG_BASE_ISLINK': 'l' in fca.flags(),
- }
-
- ui = repo.ui
+ unused, unused, unused, back = files
+ a = _workingpath(repo, fcd)
+ b, c = _maketempfiles(repo, fco, fca)
+ try:
+ out = ""
+ env = {'HG_FILE': fcd.path(),
+ 'HG_MY_NODE': short(mynode),
+ 'HG_OTHER_NODE': str(fco.changectx()),
+ 'HG_BASE_NODE': str(fca.changectx()),
+ 'HG_MY_ISLINK': 'l' in fcd.flags(),
+ 'HG_OTHER_ISLINK': 'l' in fco.flags(),
+ 'HG_BASE_ISLINK': 'l' in fca.flags(),
+ }
+ ui = repo.ui
- args = _toolstr(ui, tool, "args", '$local $base $other')
- if "$output" in args:
- out, a = a, back # read input from backup, write to original
- replace = {'local': a, 'base': b, 'other': c, 'output': out}
- args = util.interpolate(r'\$', replace, args,
- lambda s: util.shellquote(util.localpath(s)))
- cmd = toolpath + ' ' + args
- if _toolbool(ui, tool, "gui"):
- repo.ui.status(_('running merge tool %s for file %s\n') %
- (tool, fcd.path()))
- repo.ui.debug('launching merge tool: %s\n' % cmd)
- r = ui.system(cmd, cwd=repo.root, environ=env, blockedtag='mergetool')
- repo.ui.debug('merge tool returned: %s\n' % r)
- return True, r, False
+ args = _toolstr(ui, tool, "args", '$local $base $other')
+ if "$output" in args:
+ out, a = a, back # read input from backup, write to original
+ replace = {'local': a, 'base': b, 'other': c, 'output': out}
+ args = util.interpolate(r'\$', replace, args,
+ lambda s: util.shellquote(util.localpath(s)))
+ cmd = toolpath + ' ' + args
+ if _toolbool(ui, tool, "gui"):
+ repo.ui.status(_('running merge tool %s for file %s\n') %
+ (tool, fcd.path()))
+ repo.ui.debug('launching merge tool: %s\n' % cmd)
+ r = ui.system(cmd, cwd=repo.root, environ=env, blockedtag='mergetool')
+ repo.ui.debug('merge tool returned: %s\n' % r)
+ return True, r, False
+ finally:
+ util.unlink(b)
+ util.unlink(c)
def _formatconflictmarker(repo, ctx, template, label, pad):
"""Applies the given template to the ctx, prefixed by the label.
@@ -595,7 +585,48 @@
"o": " [%s]" % labels[1],
}
-def _filemerge(premerge, repo, mynode, orig, fcd, fco, fca, labels=None):
+def _restorebackup(fcd, back):
+ # TODO: Add a workingfilectx.write(otherfilectx) path so we can use
+ # util.copy here instead.
+ fcd.write(util.readfile(back), fcd.flags())
+
+def _makebackup(repo, ui, fcd, premerge):
+ """Makes a backup of the local `fcd` file prior to merging.
+
+ In addition to preserving the user's pre-existing modifications to `fcd`
+ (if any), the backup is used to undo certain premerges, confirm whether a
+ merge changed anything, and determine what line endings the new file should
+ have.
+ """
+ if fcd.isabsent():
+ return None
+
+ a = _workingpath(repo, fcd)
+ back = scmutil.origpath(ui, repo, a)
+ if premerge:
+ util.copyfile(a, back)
+ return back
+
+def _maketempfiles(repo, fco, fca):
+ """Writes out `fco` and `fca` as temporary files, so an external merge
+ tool may use them.
+ """
+ def temp(prefix, ctx):
+ fullbase, ext = os.path.splitext(ctx.path())
+ pre = "%s~%s." % (os.path.basename(fullbase), prefix)
+ (fd, name) = tempfile.mkstemp(prefix=pre, suffix=ext)
+ data = repo.wwritedata(ctx.path(), ctx.data())
+ f = os.fdopen(fd, pycompat.sysstr("wb"))
+ f.write(data)
+ f.close()
+ return name
+
+ b = temp("base", fca)
+ c = temp("other", fco)
+
+ return b, c
+
+def _filemerge(premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
"""perform a 3-way merge in the working directory
premerge = whether this is a premerge
@@ -608,16 +639,6 @@
Returns whether the merge is complete, the return value of the merge, and
a boolean indicating whether the file was deleted from disk."""
- def temp(prefix, ctx):
- fullbase, ext = os.path.splitext(ctx.path())
- pre = "%s~%s." % (os.path.basename(fullbase), prefix)
- (fd, name) = tempfile.mkstemp(prefix=pre, suffix=ext)
- data = repo.wwritedata(ctx.path(), ctx.data())
- f = os.fdopen(fd, pycompat.sysstr("wb"))
- f.write(data)
- f.close()
- return name
-
if not fco.cmp(fcd): # files identical?
return True, None, False
@@ -645,6 +666,11 @@
onfailure = _("merging %s failed!\n")
precheck = None
+ # If using deferred writes, must flush any deferred contents if running
+ # an external merge tool since it has arbitrary access to the working
+ # copy.
+ wctx.flushall()
+
toolconf = tool, toolpath, binary, symlink
if mergetype == nomerge:
@@ -665,17 +691,8 @@
ui.warn(onfailure % fd)
return True, 1, False
- a = repo.wjoin(fd)
- b = temp("base", fca)
- c = temp("other", fco)
- if not fcd.isabsent():
- back = scmutil.origpath(ui, repo, a)
- if premerge:
- util.copyfile(a, back)
- else:
- back = None
- files = (a, b, c, back)
-
+ back = _makebackup(repo, ui, fcd, premerge)
+ files = (None, None, None, back)
r = 1
try:
markerstyle = ui.config('ui', 'mergemarkers')
@@ -693,7 +710,7 @@
toolconf, files, labels=labels)
if needcheck:
- r = _check(r, ui, tool, fcd, files)
+ r = _check(repo, r, ui, tool, fcd, files)
if r:
if onfailure:
@@ -703,12 +720,10 @@
finally:
if not r and back is not None:
util.unlink(back)
- util.unlink(b)
- util.unlink(c)
-def _check(r, ui, tool, fcd, files):
+def _check(repo, r, ui, tool, fcd, files):
fd = fcd.path()
- a, b, c, back = files
+ unused, unused, unused, back = files
if not r and (_toolbool(ui, tool, "checkconflicts") or
'conflicts' in _toollist(ui, tool, "check")):
@@ -726,22 +741,39 @@
if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
'changed' in
_toollist(ui, tool, "check")):
- if back is not None and filecmp.cmp(a, back):
+ if back is not None and filecmp.cmp(_workingpath(repo, fcd), back):
if ui.promptchoice(_(" output file %s appears unchanged\n"
"was merge successful (yn)?"
"$$ &Yes $$ &No") % fd, 1):
r = 1
if back is not None and _toolbool(ui, tool, "fixeol"):
- _matcheol(a, back)
+ _matcheol(_workingpath(repo, fcd), back)
return r
-def premerge(repo, mynode, orig, fcd, fco, fca, labels=None):
- return _filemerge(True, repo, mynode, orig, fcd, fco, fca, labels=labels)
+def _workingpath(repo, ctx):
+ return repo.wjoin(ctx.path())
+
+def premerge(repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
+ return _filemerge(True, repo, wctx, mynode, orig, fcd, fco, fca,
+ labels=labels)
+
+def filemerge(repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
+ return _filemerge(False, repo, wctx, mynode, orig, fcd, fco, fca,
+ labels=labels)
-def filemerge(repo, mynode, orig, fcd, fco, fca, labels=None):
- return _filemerge(False, repo, mynode, orig, fcd, fco, fca, labels=labels)
+def loadinternalmerge(ui, extname, registrarobj):
+ """Load internal merge tool from specified registrarobj
+ """
+ for name, func in registrarobj._table.iteritems():
+ fullname = ':' + name
+ internals[fullname] = func
+ internals['internal:' + name] = func
+ internalsdoc[fullname] = func
+
+# load built-in merge tools explicitly to setup internalsdoc
+loadinternalmerge(None, None, internaltool)
# tell hggettext to extract docstrings from these functions:
i18nfunctions = internals.values()
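
The registrar-based registration above follows a decorator-and-table pattern:
the decorator records each merge tool in a table keyed by name, and
loadinternalmerge() later copies that table into the module's own
dictionaries. A standalone sketch of the pattern (a deliberately simplified
stand-in, not the real registrar.internalmerge, which also records the merge
type, onfailure and precheck attributes)::

    class toolregistrar(object):
        """Collect decorated functions into a table for a loader."""

        def __init__(self):
            self._table = {}

        def __call__(self, name, mergetype):
            def register(func):
                func.mergetype = mergetype
                self._table[name] = func
                return func
            return register

    internaltool = toolregistrar()
    internals = {}

    @internaltool('fail', 'nomerge')
    def _ifail(*args, **kwargs):
        """Always leave the file unresolved (stand-in for :fail)."""
        return 1

    def loadinternalmerge(registrarobj):
        # mirror loadinternalmerge() above: expose both spellings
        for name, func in registrarobj._table.items():
            internals[':' + name] = func
            internals['internal:' + name] = func

    loadinternalmerge(internaltool)
    print(sorted(internals))   # [':fail', 'internal:fail']
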
--- a/mercurial/formatter.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/formatter.py Sat Sep 30 07:52:48 2017 -0700
@@ -45,21 +45,25 @@
... import sys
... from . import ui as uimod
... ui = uimod.ui()
-... ui.fout = sys.stdout # redirect to doctest
... ui.verbose = verbose
-... return fn(ui, ui.formatter(fn.__name__, opts))
+... ui.pushbuffer()
+... try:
+... return fn(ui, ui.formatter(pycompat.sysbytes(fn.__name__),
+... pycompat.byteskwargs(opts)))
+... finally:
+... print(pycompat.sysstr(ui.popbuffer()), end='')
Basic example:
>>> def files(ui, fm):
-... files = [('foo', 123, (0, 0)), ('bar', 456, (1, 0))]
+... files = [(b'foo', 123, (0, 0)), (b'bar', 456, (1, 0))]
... for f in files:
... fm.startitem()
-... fm.write('path', '%s', f[0])
-... fm.condwrite(ui.verbose, 'date', ' %s',
-... fm.formatdate(f[2], '%Y-%m-%d %H:%M:%S'))
+... fm.write(b'path', b'%s', f[0])
+... fm.condwrite(ui.verbose, b'date', b' %s',
+... fm.formatdate(f[2], b'%Y-%m-%d %H:%M:%S'))
... fm.data(size=f[1])
-... fm.plain('\\n')
+... fm.plain(b'\\n')
... fm.end()
>>> show(files)
foo
@@ -67,7 +71,7 @@
>>> show(files, verbose=True)
foo 1970-01-01 00:00:00
bar 1970-01-01 00:00:01
->>> show(files, template='json')
+>>> show(files, template=b'json')
[
{
"date": [0, 0],
@@ -80,7 +84,7 @@
"size": 456
}
]
->>> show(files, template='path: {path}\\ndate: {date|rfc3339date}\\n')
+>>> show(files, template=b'path: {path}\\ndate: {date|rfc3339date}\\n')
path: foo
date: 1970-01-01T00:00:00+00:00
path: bar
@@ -90,18 +94,18 @@
>>> def subrepos(ui, fm):
... fm.startitem()
-... fm.write('repo', '[%s]\\n', 'baz')
-... files(ui, fm.nested('files'))
+... fm.write(b'repo', b'[%s]\\n', b'baz')
+... files(ui, fm.nested(b'files'))
... fm.end()
>>> show(subrepos)
[baz]
foo
bar
->>> show(subrepos, template='{repo}: {join(files % "{path}", ", ")}\\n')
+>>> show(subrepos, template=b'{repo}: {join(files % "{path}", ", ")}\\n')
baz: foo, bar
"""
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
import collections
import contextlib
--- a/mercurial/graphmod.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/graphmod.py Sat Sep 30 07:52:48 2017 -0700
@@ -172,7 +172,7 @@
yield (cur, type, data, (col, color), edges)
seen = next
-def asciiedges(type, char, lines, state, rev, parents):
+def asciiedges(type, char, state, rev, parents):
"""adds edge info to changelog DAG walk suitable for ascii()"""
seen = state['seen']
if rev not in seen:
@@ -192,6 +192,7 @@
state['edges'][parent] = state['styles'].get(ptype, '|')
ncols = len(seen)
+ width = 1 + ncols * 2
nextseen = seen[:]
nextseen[nodeidx:nodeidx + 1] = newparents
edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
@@ -205,9 +206,9 @@
edges.append((nodeidx, nodeidx))
edges.append((nodeidx, nodeidx + 1))
nmorecols = 1
- yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
+ width += 2
+ yield (type, char, width, (nodeidx, edges, ncols, nmorecols))
char = '\\'
- lines = []
nodeidx += 1
ncols += 1
edges = []
@@ -218,9 +219,11 @@
if len(newparents) > 1:
edges.append((nodeidx, nodeidx + 1))
nmorecols = len(nextseen) - ncols
+ if nmorecols > 0:
+ width += 2
# remove current node from edge characters, no longer needed
state['edges'].pop(rev, None)
- yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
+ yield (type, char, width, (nodeidx, edges, ncols, nmorecols))
def _fixlongrightedges(edges):
for (i, (start, end)) in enumerate(edges):
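
The width now yielded by asciiedges() is a pure function of the column count:
each column takes a node or edge character plus a spacer, and a row that
introduces an extra column widens by one more pair. A small illustrative
sketch of that arithmetic (not part of graphmod itself)::

    def graphwidth(ncols, expanding=False):
        """Character width of one ascii graph row.

        ncols     -- number of columns currently drawn
        expanding -- True when this row adds a column (fork/merge),
                     which contributes two more characters
        """
        width = 1 + ncols * 2
        if expanding:
            width += 2
        return width

    for ncols in (1, 2, 3):
        print('%d %d %d' % (ncols, graphwidth(ncols),
                            graphwidth(ncols, expanding=True)))
    # 1 3 5
    # 2 5 7
    # 3 7 9
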
--- a/mercurial/help/config.txt Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/help/config.txt Sat Sep 30 07:52:48 2017 -0700
@@ -313,6 +313,9 @@
``ignorews``
Ignore white space when comparing lines.
+``ignorewseol``
+ Ignore white space at the end of a line when comparing lines.
+
``ignorewsamount``
Ignore changes in the amount of white space.
@@ -1610,6 +1613,10 @@
Minimum delay before showing a new topic. When set to less than 3 * refresh,
that value will be used instead. (default: 1)
+``estimateinterval``
+ Maximum sampling interval in seconds for speed and estimated time
+ calculation. (default: 60)
+
``refresh``
Time in seconds between refreshes of the progress bar. (default: 0.1)
@@ -2004,7 +2011,9 @@
``origbackuppath``
The path to a directory used to store generated .orig files. If the path is
- not a directory, one will be created.
+ not a directory, one will be created. If set, files stored in this
+ directory have the same name as the original file and do not have a .orig
+ suffix.
``paginate``
Control the pagination of command output (default: True). See :hg:`help pager`
--- a/mercurial/hg.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/hg.py Sat Sep 30 07:52:48 2017 -0700
@@ -180,17 +180,17 @@
def defaultdest(source):
'''return default destination of clone if none is given
- >>> defaultdest('foo')
+ >>> defaultdest(b'foo')
'foo'
- >>> defaultdest('/foo/bar')
+ >>> defaultdest(b'/foo/bar')
'bar'
- >>> defaultdest('/')
+ >>> defaultdest(b'/')
''
- >>> defaultdest('')
+ >>> defaultdest(b'')
''
- >>> defaultdest('http://example.org/')
+ >>> defaultdest(b'http://example.org/')
''
- >>> defaultdest('http://example.org/foo/')
+ >>> defaultdest(b'http://example.org/foo/')
'foo'
'''
path = util.url(source).path
@@ -641,11 +641,11 @@
destrepo = destpeer.local()
if destrepo:
template = uimod.samplehgrcs['cloned']
- fp = destrepo.vfs("hgrc", "w", text=True)
+ fp = destrepo.vfs("hgrc", "wb")
u = util.url(abspath)
u.passwd = None
- defaulturl = str(u)
- fp.write(template % defaulturl)
+ defaulturl = bytes(u)
+ fp.write(util.tonativeeol(template % defaulturl))
fp.close()
destrepo.ui.setconfig('paths', 'default', defaulturl, 'clone')
--- a/mercurial/hgweb/common.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/hgweb/common.py Sat Sep 30 07:52:48 2017 -0700
@@ -178,7 +178,8 @@
if err.errno == errno.ENOENT:
raise ErrorResponse(HTTP_NOT_FOUND)
else:
- raise ErrorResponse(HTTP_SERVER_ERROR, err.strerror)
+ raise ErrorResponse(HTTP_SERVER_ERROR,
+ encoding.strtolocal(err.strerror))
def paritygen(stripecount, offset=0):
"""count parity of horizontal stripes for easier reading"""
--- a/mercurial/hgweb/hgweb_mod.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/hgweb/hgweb_mod.py Sat Sep 30 07:52:48 2017 -0700
@@ -99,7 +99,7 @@
self.archivespecs = archivespecs
self.maxchanges = self.configint('web', 'maxchanges', 10)
- self.stripecount = self.configint('web', 'stripes', 1)
+ self.stripecount = self.configint('web', 'stripes')
self.maxshortchanges = self.configint('web', 'maxshortchanges', 60)
self.maxfiles = self.configint('web', 'maxfiles', 10)
self.allowpull = self.configbool('web', 'allowpull', True)
@@ -320,7 +320,7 @@
rctx = requestcontext(self, repo)
# This state is global across all threads.
- encoding.encoding = rctx.config('web', 'encoding', encoding.encoding)
+ encoding.encoding = rctx.config('web', 'encoding')
rctx.repo.ui.environ = req.env
if rctx.csp:
--- a/mercurial/hgweb/hgwebdir_mod.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/hgweb/hgwebdir_mod.py Sat Sep 30 07:52:48 2017 -0700
@@ -29,10 +29,12 @@
from .request import wsgirequest
from .. import (
+ configitems,
encoding,
error,
hg,
profiling,
+ pycompat,
scmutil,
templater,
ui as uimod,
@@ -70,9 +72,9 @@
"""yield url paths and filesystem paths from a list of repo paths
>>> conv = lambda seq: [(v, util.pconvert(p)) for v,p in seq]
- >>> conv(urlrepos('hg', '/opt', ['/opt/r', '/opt/r/r', '/opt']))
+ >>> conv(urlrepos(b'hg', b'/opt', [b'/opt/r', b'/opt/r/r', b'/opt']))
[('hg/r', '/opt/r'), ('hg/r/r', '/opt/r/r'), ('hg', '/opt')]
- >>> conv(urlrepos('', '/opt', ['/opt/r', '/opt/r/r', '/opt']))
+ >>> conv(urlrepos(b'', b'/opt', [b'/opt/r', b'/opt/r/r', b'/opt']))
[('r', '/opt/r'), ('r/r', '/opt/r/r'), ('', '/opt')]
"""
for path in paths:
@@ -84,17 +86,17 @@
"""
Extract CGI variables from baseurl
- >>> geturlcgivars("http://host.org/base", "80")
+ >>> geturlcgivars(b"http://host.org/base", b"80")
('host.org', '80', '/base')
- >>> geturlcgivars("http://host.org:8000/base", "80")
+ >>> geturlcgivars(b"http://host.org:8000/base", b"80")
('host.org', '8000', '/base')
- >>> geturlcgivars('/base', 8000)
+ >>> geturlcgivars(b'/base', 8000)
('', '8000', '/base')
- >>> geturlcgivars("base", '8000')
+ >>> geturlcgivars(b"base", b'8000')
('', '8000', '/base')
- >>> geturlcgivars("http://host", '8000')
+ >>> geturlcgivars(b"http://host", b'8000')
('host', '8000', '/')
- >>> geturlcgivars("http://host/", '8000')
+ >>> geturlcgivars(b"http://host/", b'8000')
('host', '8000', '/')
"""
u = util.url(baseurl)
@@ -105,7 +107,7 @@
if not path.startswith('/'):
path = '/' + path
- return name, str(port), path
+ return name, pycompat.bytestr(port), path
class hgwebdir(object):
"""HTTP server for multiple repositories.
@@ -124,10 +126,11 @@
self.refresh()
def refresh(self):
- refreshinterval = 20
if self.ui:
- refreshinterval = self.ui.configint('web', 'refreshinterval',
- refreshinterval)
+ refreshinterval = self.ui.configint('web', 'refreshinterval')
+ else:
+ item = configitems.coreitems['web']['refreshinterval']
+ refreshinterval = item.default
# refreshinterval <= 0 means to always refresh.
if (refreshinterval > 0 and
@@ -170,16 +173,14 @@
self.repos = repos
self.ui = u
- encoding.encoding = self.ui.config('web', 'encoding',
- encoding.encoding)
- self.style = self.ui.config('web', 'style', 'paper')
- self.templatepath = self.ui.config('web', 'templates', None,
- untrusted=False)
- self.stripecount = self.ui.config('web', 'stripes', 1)
+ encoding.encoding = self.ui.config('web', 'encoding')
+ self.style = self.ui.config('web', 'style')
+ self.templatepath = self.ui.config('web', 'templates', untrusted=False)
+ self.stripecount = self.ui.config('web', 'stripes')
if self.stripecount:
self.stripecount = int(self.stripecount)
self._baseurl = self.ui.config('web', 'baseurl')
- prefix = self.ui.config('web', 'prefix', '')
+ prefix = self.ui.config('web', 'prefix')
if prefix.startswith('/'):
prefix = prefix[1:]
if prefix.endswith('/'):
@@ -290,10 +291,10 @@
repo = hg.repository(self.ui.copy(), real)
return hgweb_mod.hgweb(repo).run_wsgi(req)
except IOError as inst:
- msg = inst.strerror
+ msg = encoding.strtolocal(inst.strerror)
raise ErrorResponse(HTTP_SERVER_ERROR, msg)
except error.RepoError as inst:
- raise ErrorResponse(HTTP_SERVER_ERROR, str(inst))
+ raise ErrorResponse(HTTP_SERVER_ERROR, bytes(inst))
# browse subdirectories
subdir = virtual + '/'
@@ -325,8 +326,8 @@
def rawentries(subdir="", **map):
- descend = self.ui.configbool('web', 'descend', True)
- collapse = self.ui.configbool('web', 'collapse', False)
+ descend = self.ui.configbool('web', 'descend')
+ collapse = self.ui.configbool('web', 'collapse')
seenrepos = set()
seendirs = set()
for name, path in self.repos:
@@ -429,7 +430,7 @@
continue
contact = get_contact(get)
- description = get("web", "description", "")
+ description = get("web", "description")
seenrepos.add(name)
name = get("web", "name", name)
row = {'contact': contact or "unknown",
@@ -492,7 +493,7 @@
else:
yield config('web', 'motd', '')
- def config(section, name, default=None, untrusted=True):
+ def config(section, name, default=uimod._unset, untrusted=True):
return self.ui.config(section, name, default, untrusted)
self.updatereqenv(req.env)
--- a/mercurial/hgweb/protocol.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/hgweb/protocol.py Sat Sep 30 07:52:48 2017 -0700
@@ -75,6 +75,9 @@
return args
def getfile(self, fp):
length = int(self.req.env['CONTENT_LENGTH'])
+ # If httppostargs is used, we need to read Content-Length
+ # minus the amount that was consumed by args.
+ length -= int(self.req.env.get('HTTP_X_HGARGS_POST', 0))
for s in util.filechunkiter(self.req, limit=length):
fp.write(s)
def redirect(self):
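
With httppostargs, the encoded command arguments are sent as a prefix of the
request body and their size is advertised in the X-HgArgs-Post header, so the
pushed bundle occupies Content-Length minus that prefix. A toy sketch of the
arithmetic on a WSGI-style environ dict (values invented for illustration)::

    def payloadlength(environ):
        """Bytes of the body that belong to the uploaded bundle itself."""
        length = int(environ['CONTENT_LENGTH'])
        # the X-HgArgs-Post header appears as HTTP_X_HGARGS_POST in WSGI
        length -= int(environ.get('HTTP_X_HGARGS_POST', 0))
        return length

    print(payloadlength({'CONTENT_LENGTH': '1048576'}))
    # 1048576
    print(payloadlength({'CONTENT_LENGTH': '1048576',
                         'HTTP_X_HGARGS_POST': '96'}))
    # 1048480
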
--- a/mercurial/hgweb/server.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/hgweb/server.py Sat Sep 30 07:52:48 2017 -0700
@@ -277,13 +277,13 @@
handler.preparehttpserver(self, ui)
- prefix = ui.config('web', 'prefix', '')
+ prefix = ui.config('web', 'prefix')
if prefix:
prefix = '/' + prefix.strip('/')
self.prefix = prefix
- alog = openlog(ui.config('web', 'accesslog', '-'), ui.fout)
- elog = openlog(ui.config('web', 'errorlog', '-'), ui.ferr)
+ alog = openlog(ui.config('web', 'accesslog'), ui.fout)
+ elog = openlog(ui.config('web', 'errorlog'), ui.ferr)
self.accesslog = alog
self.errorlog = elog
@@ -326,8 +326,8 @@
mimetypes.init()
sys.setdefaultencoding(oldenc)
- address = ui.config('web', 'address', '')
- port = util.getport(ui.config('web', 'port', 8000))
+ address = ui.config('web', 'address')
+ port = util.getport(ui.config('web', 'port'))
try:
return cls(ui, app, (address, port), handler)
except socket.error as inst:
--- a/mercurial/hgweb/webcommands.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/hgweb/webcommands.py Sat Sep 30 07:52:48 2017 -0700
@@ -719,8 +719,11 @@
start = max(0, count - web.maxchanges)
end = min(count, start + web.maxchanges)
+ desc = web.config("web", "description")
+ if not desc:
+ desc = 'unknown'
return tmpl("summary",
- desc=web.config("web", "description", "unknown"),
+ desc=desc,
owner=get_contact(web.config) or "unknown",
lastchange=tip.date(),
tags=tagentries,
@@ -759,7 +762,7 @@
ctx = fctx.changectx()
basectx = ctx.p1()
- style = web.config('web', 'style', 'paper')
+ style = web.config('web', 'style')
if 'style' in req.form:
style = req.form['style'][0]
@@ -996,7 +999,7 @@
revs = fctx.filelog().revs(start, end - 1)
entries = []
- diffstyle = web.config('web', 'style', 'paper')
+ diffstyle = web.config('web', 'style')
if 'style' in req.form:
diffstyle = req.form['style'][0]
@@ -1111,13 +1114,13 @@
ctx = webutil.changectx(web.repo, req)
pats = []
- matchfn = scmutil.match(ctx, [])
+ match = scmutil.match(ctx, [])
file = req.form.get('file', None)
if file:
pats = ['path:' + file[0]]
- matchfn = scmutil.match(ctx, pats, default='path')
+ match = scmutil.match(ctx, pats, default='path')
if pats:
- files = [f for f in ctx.manifest().keys() if matchfn(f)]
+ files = [f for f in ctx.manifest().keys() if match(f)]
if not files:
raise ErrorResponse(HTTP_NOT_FOUND,
'file(s) not found: %s' % file[0])
@@ -1132,7 +1135,7 @@
req.respond(HTTP_OK, mimetype)
archival.archive(web.repo, req, cnode, artype, prefix=name,
- matchfn=matchfn,
+ matchfn=match,
subrepos=web.configbool("web", "archivesubrepos"))
return []
--- a/mercurial/hgweb/webutil.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/hgweb/webutil.py Sat Sep 30 07:52:48 2017 -0700
@@ -406,7 +406,7 @@
if basectx is None:
basectx = ctx.p1()
- style = web.config('web', 'style', 'paper')
+ style = web.config('web', 'style')
if 'style' in req.form:
style = req.form['style'][0]
--- a/mercurial/httppeer.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/httppeer.py Sat Sep 30 07:52:48 2017 -0700
@@ -9,6 +9,7 @@
from __future__ import absolute_import
import errno
+import io
import os
import socket
import struct
@@ -86,13 +87,51 @@
resp.__class__ = readerproxy
+class _multifile(object):
+ def __init__(self, *fileobjs):
+ for f in fileobjs:
+ if not util.safehasattr(f, 'length'):
+ raise ValueError(
+ '_multifile only supports file objects that '
+ 'have a length but this one does not:', type(f), f)
+ self._fileobjs = fileobjs
+ self._index = 0
+
+ @property
+ def length(self):
+ return sum(f.length for f in self._fileobjs)
+
+ def read(self, amt=None):
+ if amt <= 0:
+ return ''.join(f.read() for f in self._fileobjs)
+ parts = []
+ while amt and self._index < len(self._fileobjs):
+ parts.append(self._fileobjs[self._index].read(amt))
+ got = len(parts[-1])
+ if got < amt:
+ self._index += 1
+ amt -= got
+ return ''.join(parts)
+
+ def seek(self, offset, whence=os.SEEK_SET):
+ if whence != os.SEEK_SET:
+ raise NotImplementedError(
+ '_multifile does not support anything other'
+ ' than os.SEEK_SET for whence on seek()')
+ if offset != 0:
+ raise NotImplementedError(
+ '_multifile only supports seeking to start, but that '
+ 'could be fixed if you need it')
+ for f in self._fileobjs:
+ f.seek(0)
+ self._index = 0
+
class httppeer(wireproto.wirepeer):
def __init__(self, ui, path):
- self.path = path
- self.caps = None
- self.handler = None
- self.urlopener = None
- self.requestbuilder = None
+ self._path = path
+ self._caps = None
+ self._urlopener = None
+ self._requestbuilder = None
u = util.url(path)
if u.query or u.fragment:
raise error.Abort(_('unsupported URL component: "%s"') %
@@ -101,39 +140,60 @@
# urllib cannot handle URLs with embedded user or passwd
self._url, authinfo = u.authinfo()
- self.ui = ui
- self.ui.debug('using %s\n' % self._url)
+ self._ui = ui
+ ui.debug('using %s\n' % self._url)
- self.urlopener = url.opener(ui, authinfo)
- self.requestbuilder = urlreq.request
+ self._urlopener = url.opener(ui, authinfo)
+ self._requestbuilder = urlreq.request
def __del__(self):
- urlopener = getattr(self, 'urlopener', None)
+ urlopener = getattr(self, '_urlopener', None)
if urlopener:
for h in urlopener.handlers:
h.close()
getattr(h, "close_all", lambda : None)()
+ # Begin of _basepeer interface.
+
+ @util.propertycache
+ def ui(self):
+ return self._ui
+
def url(self):
- return self.path
+ return self._path
+
+ def local(self):
+ return None
+
+ def peer(self):
+ return self
+
+ def canpush(self):
+ return True
+
+ def close(self):
+ pass
+
+ # End of _basepeer interface.
+
+ # Begin of _basewirepeer interface.
+
+ def capabilities(self):
+ if self._caps is None:
+ try:
+ self._fetchcaps()
+ except error.RepoError:
+ self._caps = set()
+ self.ui.debug('capabilities: %s\n' %
+ (' '.join(self._caps or ['none'])))
+ return self._caps
+
+ # End of _basewirepeer interface.
# look up capabilities only when needed
def _fetchcaps(self):
- self.caps = set(self._call('capabilities').split())
-
- def _capabilities(self):
- if self.caps is None:
- try:
- self._fetchcaps()
- except error.RepoError:
- self.caps = set()
- self.ui.debug('capabilities: %s\n' %
- (' '.join(self.caps or ['none'])))
- return self.caps
-
- def lock(self):
- raise error.Abort(_('operation not supported over http'))
+ self._caps = set(self._call('capabilities').split())
def _callstream(self, cmd, _compressible=False, **args):
if cmd == 'pushkey':
@@ -148,18 +208,20 @@
# Important: don't use self.capable() here or else you end up
# with infinite recursion when trying to look up capabilities
# for the first time.
- postargsok = self.caps is not None and 'httppostargs' in self.caps
- # TODO: support for httppostargs when data is a file-like
- # object rather than a basestring
- canmungedata = not data or isinstance(data, basestring)
- if postargsok and canmungedata:
+ postargsok = self._caps is not None and 'httppostargs' in self._caps
+ if postargsok and args:
strargs = urlreq.urlencode(sorted(args.items()))
- if strargs:
- if not data:
- data = strargs
- elif isinstance(data, basestring):
- data = strargs + data
- headers['X-HgArgs-Post'] = len(strargs)
+ if not data:
+ data = strargs
+ else:
+ if isinstance(data, basestring):
+ i = io.BytesIO(data)
+ i.length = len(data)
+ data = i
+ argsio = io.BytesIO(strargs)
+ argsio.length = len(strargs)
+ data = _multifile(argsio, data)
+ headers['X-HgArgs-Post'] = len(strargs)
else:
if len(args) > 0:
httpheader = self.capable('httpheader')
@@ -193,7 +255,7 @@
protoparams = []
mediatypes = set()
- if self.caps is not None:
+ if self._caps is not None:
mt = self.capable('httpmediatype')
if mt:
protoparams.append('0.1')
@@ -221,13 +283,13 @@
if varyheaders:
headers['Vary'] = ','.join(varyheaders)
- req = self.requestbuilder(cu, data, headers)
+ req = self._requestbuilder(cu, data, headers)
if data is not None:
self.ui.debug("sending %s bytes\n" % size)
req.add_unredirected_header('Content-Length', '%d' % size)
try:
- resp = self.urlopener.open(req)
+ resp = self._urlopener.open(req)
except urlerr.httperror as inst:
if inst.code == 401:
raise error.Abort(_('authorization failed'))
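
The _multifile helper above presents several length-carrying file objects as
one sequential stream, which is what allows the urlencoded arguments and the
bundle payload to travel as a single request body. A short usage sketch with
io.BytesIO objects given the length attribute the class requires (Python 2
string semantics, matching the code above; the payloads are invented)::

    import io

    def lengthfile(data):
        f = io.BytesIO(data)
        f.length = len(data)   # _multifile insists on a length attribute
        return f

    args = lengthfile('cmds=heads+known')
    body = lengthfile('HG20...rest of the bundle2 payload...')

    mf = _multifile(args, body)
    print(mf.length)        # sum of both parts
    print(mf.read(4))       # 'cmds', served from the first file object
    print(mf.read(1000))    # drains the first part, then the second
    mf.seek(0)              # only rewinding to offset 0 is supported
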
--- a/mercurial/keepalive.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/keepalive.py Sat Sep 30 07:52:48 2017 -0700
@@ -90,6 +90,7 @@
import sys
import threading
+from .i18n import _
from . import (
util,
)
@@ -231,6 +232,10 @@
self._cm.add(host, h, 0)
self._start_transaction(h, req)
r = h.getresponse()
+ # The string form of BadStatusLine is the status line. Add some context
+ # to make the error message slightly more useful.
+ except httplib.BadStatusLine as err:
+ raise urlerr.urlerror(_('bad HTTP status line: %s') % err.line)
except (socket.error, httplib.HTTPException) as err:
raise urlerr.urlerror(err)
@@ -388,7 +393,7 @@
def read(self, amt=None):
# the _rbuf test is only in this first if for speed. It's not
# logically necessary
- if self._rbuf and not amt is None:
+ if self._rbuf and amt is not None:
L = len(self._rbuf)
if amt > L:
amt -= L
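
BadStatusLine stringifies to just the unparseable status line, so the hunk
above wraps it in a labelled URL error before letting it propagate. The same
idea in miniature, spelled with the Python 3 module names (the patch uses the
Python 2 spellings through httplib and urlerr)::

    import http.client
    import urllib.error

    def getresponsechecked(getresponse):
        try:
            return getresponse()
        except http.client.BadStatusLine as err:
            # err.line is the raw first line the server sent back
            raise urllib.error.URLError('bad HTTP status line: %s' % err.line)

    def brokenserver():
        # stand-in for h.getresponse() against a server that answered
        # with something that is not HTTP at all
        raise http.client.BadStatusLine('<html>')

    try:
        getresponsechecked(brokenserver)
    except urllib.error.URLError as err:
        print(err.reason)   # bad HTTP status line: <html>
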
--- a/mercurial/localrepo.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/localrepo.py Sat Sep 30 07:52:48 2017 -0700
@@ -31,6 +31,7 @@
context,
dirstate,
dirstateguard,
+ discovery,
encoding,
error,
exchange,
@@ -49,6 +50,7 @@
phases,
pushkey,
pycompat,
+ repository,
repoview,
revset,
revsetlang,
@@ -144,45 +146,52 @@
'unbundle'}
legacycaps = moderncaps.union({'changegroupsubset'})
-class localpeer(peer.peerrepository):
+class localpeer(repository.peer):
'''peer for a local repo; reflects only the most recent API'''
def __init__(self, repo, caps=None):
+ super(localpeer, self).__init__()
+
if caps is None:
caps = moderncaps.copy()
- peer.peerrepository.__init__(self)
self._repo = repo.filtered('served')
- self.ui = repo.ui
+ self._ui = repo.ui
self._caps = repo._restrictcapabilities(caps)
- self.requirements = repo.requirements
- self.supportedformats = repo.supportedformats
+
+ # Begin of _basepeer interface.
+
+ @util.propertycache
+ def ui(self):
+ return self._ui
+
+ def url(self):
+ return self._repo.url()
+
+ def local(self):
+ return self._repo
+
+ def peer(self):
+ return self
+
+ def canpush(self):
+ return True
def close(self):
self._repo.close()
- def _capabilities(self):
- return self._caps
-
- def local(self):
- return self._repo
+ # End of _basepeer interface.
- def canpush(self):
- return True
-
- def url(self):
- return self._repo.url()
-
- def lookup(self, key):
- return self._repo.lookup(key)
+ # Begin of _basewirecommands interface.
def branchmap(self):
return self._repo.branchmap()
- def heads(self):
- return self._repo.heads()
+ def capabilities(self):
+ return self._caps
- def known(self, nodes):
- return self._repo.known(nodes)
+ def debugwireargs(self, one, two, three=None, four=None, five=None):
+ """Used to test argument passing over the wire"""
+ return "%s %s %s %s %s" % (one, two, three, four, five)
def getbundle(self, source, heads=None, common=None, bundlecaps=None,
**kwargs):
@@ -199,8 +208,24 @@
else:
return changegroup.getunbundler('01', cb, None)
- # TODO We might want to move the next two calls into legacypeer and add
- # unbundle instead.
+ def heads(self):
+ return self._repo.heads()
+
+ def known(self, nodes):
+ return self._repo.known(nodes)
+
+ def listkeys(self, namespace):
+ return self._repo.listkeys(namespace)
+
+ def lookup(self, key):
+ return self._repo.lookup(key)
+
+ def pushkey(self, namespace, key, old, new):
+ return self._repo.pushkey(namespace, key, old, new)
+
+ def stream_out(self):
+ raise error.Abort(_('cannot perform stream clone against local '
+ 'peer'))
def unbundle(self, cg, heads, url):
"""apply a bundle on a repo
@@ -237,37 +262,41 @@
except error.PushRaced as exc:
raise error.ResponseError(_('push failed:'), str(exc))
- def lock(self):
- return self._repo.lock()
+ # End of _basewirecommands interface.
- def pushkey(self, namespace, key, old, new):
- return self._repo.pushkey(namespace, key, old, new)
+ # Begin of peer interface.
- def listkeys(self, namespace):
- return self._repo.listkeys(namespace)
+ def iterbatch(self):
+ return peer.localiterbatcher(self)
- def debugwireargs(self, one, two, three=None, four=None, five=None):
- '''used to test argument passing over the wire'''
- return "%s %s %s %s %s" % (one, two, three, four, five)
+ # End of peer interface.
-class locallegacypeer(localpeer):
+class locallegacypeer(repository.legacypeer, localpeer):
'''peer extension which implements legacy methods too; used for tests with
restricted capabilities'''
def __init__(self, repo):
- localpeer.__init__(self, repo, caps=legacycaps)
+ super(locallegacypeer, self).__init__(repo, caps=legacycaps)
+
+ # Begin of baselegacywirecommands interface.
+
+ def between(self, pairs):
+ return self._repo.between(pairs)
def branches(self, nodes):
return self._repo.branches(nodes)
- def between(self, pairs):
- return self._repo.between(pairs)
-
def changegroup(self, basenodes, source):
- return changegroup.changegroup(self._repo, basenodes, source)
+ outgoing = discovery.outgoing(self._repo, missingroots=basenodes,
+ missingheads=self._repo.heads())
+ return changegroup.makechangegroup(self._repo, outgoing, '01', source)
def changegroupsubset(self, bases, heads, source):
- return changegroup.changegroupsubset(self._repo, bases, heads, source)
+ outgoing = discovery.outgoing(self._repo, missingroots=bases,
+ missingheads=heads)
+ return changegroup.makechangegroup(self._repo, outgoing, '01', source)
+
+ # End of baselegacywirecommands interface.
# Increment the sub-version when the revlog v2 format changes to lock out old
# clients.
@@ -575,6 +604,10 @@
chainspan = self.ui.configbytes('experimental', 'maxdeltachainspan', -1)
if 0 <= chainspan:
self.svfs.options['maxdeltachainspan'] = chainspan
+ mmapindexthreshold = self.ui.configbytes('experimental',
+ 'mmapindexthreshold', None)
+ if mmapindexthreshold is not None:
+ self.svfs.options['mmapindexthreshold'] = mmapindexthreshold
for r in self.requirements:
if r.startswith('exp-compression-'):
@@ -1467,6 +1500,13 @@
# dirstate is invalidated separately in invalidatedirstate()
if k == 'dirstate':
continue
+ if (k == 'changelog' and
+ self.currenttransaction() and
+ self.changelog._delayed):
+ # The changelog object may store unwritten revisions. We don't
+ # want to lose them.
+ # TODO: Solve the problem instead of working around it.
+ continue
if clearfilecache:
del self._filecache[k]
--- a/mercurial/mail.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/mail.py Sat Sep 30 07:52:48 2017 -0700
@@ -10,8 +10,8 @@
import email
import email.charset
import email.header
+import email.message
import os
-import quopri
import smtplib
import socket
import time
@@ -216,17 +216,17 @@
'''Return MIME message.
Quoted-printable transfer encoding will be used if necessary.
'''
- enc = None
+ cs = email.charset.Charset(charset)
+ msg = email.message.Message()
+ msg.set_type('text/' + subtype)
+
for line in body.splitlines():
if len(line) > 950:
- body = quopri.encodestring(body)
- enc = "quoted-printable"
+ cs.body_encoding = email.charset.QP
break
- msg = email.MIMEText.MIMEText(body, subtype, charset)
- if enc:
- del msg['Content-Transfer-Encoding']
- msg['Content-Transfer-Encoding'] = enc
+ msg.set_payload(body, cs)
+
return msg
def _charsets(ui):
--- a/mercurial/manifest.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/manifest.py Sat Sep 30 07:52:48 2017 -0700
@@ -442,6 +442,8 @@
self._lm[key] = node, self.flags(key, '')
def __contains__(self, key):
+ if key is None:
+ return False
return key in self._lm
def __delitem__(self, key):
@@ -1231,7 +1233,8 @@
super(manifestrevlog, self).__init__(opener, indexfile,
# only root indexfile is cached
- checkambig=not bool(dir))
+ checkambig=not bool(dir),
+ mmaplargeindex=True)
@property
def fulltextcache(self):
--- a/mercurial/match.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/match.py Sat Sep 30 07:52:48 2017 -0700
@@ -5,7 +5,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
import copy
import os
@@ -18,6 +18,11 @@
util,
)
+allpatternkinds = ('re', 'glob', 'path', 'relglob', 'relpath', 'relre',
+ 'listfile', 'listfile0', 'set', 'include', 'subinclude',
+ 'rootfilesin')
+cwdrelativepatternkinds = ('relpath', 'glob')
+
propertycache = util.propertycache
def _rematcher(regex):
@@ -190,7 +195,7 @@
normalized and rooted patterns and with listfiles expanded.'''
kindpats = []
for kind, pat in [_patsplit(p, default) for p in patterns]:
- if kind in ('glob', 'relpath'):
+ if kind in cwdrelativepatternkinds:
pat = pathutil.canonpath(root, cwd, pat, auditor)
elif kind in ('relglob', 'path', 'rootfilesin'):
pat = util.normpath(pat)
@@ -575,28 +580,29 @@
The paths are remapped to remove/insert the path as needed:
- >>> m1 = match('root', '', ['a.txt', 'sub/b.txt'])
- >>> m2 = subdirmatcher('sub', m1)
- >>> bool(m2('a.txt'))
+ >>> from . import pycompat
+ >>> m1 = match(b'root', b'', [b'a.txt', b'sub/b.txt'])
+ >>> m2 = subdirmatcher(b'sub', m1)
+ >>> bool(m2(b'a.txt'))
False
- >>> bool(m2('b.txt'))
+ >>> bool(m2(b'b.txt'))
True
- >>> bool(m2.matchfn('a.txt'))
+ >>> bool(m2.matchfn(b'a.txt'))
False
- >>> bool(m2.matchfn('b.txt'))
+ >>> bool(m2.matchfn(b'b.txt'))
True
>>> m2.files()
['b.txt']
- >>> m2.exact('b.txt')
+ >>> m2.exact(b'b.txt')
True
- >>> util.pconvert(m2.rel('b.txt'))
+ >>> util.pconvert(m2.rel(b'b.txt'))
'sub/b.txt'
>>> def bad(f, msg):
- ... print "%s: %s" % (f, msg)
+ ... print(pycompat.sysstr(b"%s: %s" % (f, msg)))
>>> m1.bad = bad
- >>> m2.bad('x.txt', 'No such file')
+ >>> m2.bad(b'x.txt', b'No such file')
sub/x.txt: No such file
- >>> m2.abs('c.txt')
+ >>> m2.abs(b'c.txt')
'sub/c.txt'
"""
@@ -691,30 +697,31 @@
pattern."""
if ':' in pattern:
kind, pat = pattern.split(':', 1)
- if kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre',
- 'listfile', 'listfile0', 'set', 'include', 'subinclude',
- 'rootfilesin'):
+ if kind in allpatternkinds:
return kind, pat
return default, pattern
def _globre(pat):
r'''Convert an extended glob string to a regexp string.
- >>> print _globre(r'?')
+ >>> from . import pycompat
+ >>> def bprint(s):
+ ... print(pycompat.sysstr(s))
+ >>> bprint(_globre(br'?'))
.
- >>> print _globre(r'*')
+ >>> bprint(_globre(br'*'))
[^/]*
- >>> print _globre(r'**')
+ >>> bprint(_globre(br'**'))
.*
- >>> print _globre(r'**/a')
+ >>> bprint(_globre(br'**/a'))
(?:.*/)?a
- >>> print _globre(r'a/**/b')
+ >>> bprint(_globre(br'a/**/b'))
a\/(?:.*/)?b
- >>> print _globre(r'[a*?!^][^b][!c]')
+ >>> bprint(_globre(br'[a*?!^][^b][!c]'))
[a*?!^][\^b][^c]
- >>> print _globre(r'{a,b}')
+ >>> bprint(_globre(br'{a,b}'))
(?:a|b)
- >>> print _globre(r'.\*\?')
+ >>> bprint(_globre(br'.\*\?'))
\.\*\?
'''
i, n = 0, len(pat)
@@ -907,17 +914,20 @@
include directories that need to be implicitly considered as either, such as
parent directories.
- >>> _rootsanddirs(\
- [('glob', 'g/h/*', ''), ('glob', 'g/h', ''), ('glob', 'g*', '')])
+ >>> _rootsanddirs(
+ ... [(b'glob', b'g/h/*', b''), (b'glob', b'g/h', b''),
+ ... (b'glob', b'g*', b'')])
(['g/h', 'g/h', '.'], ['g', '.'])
- >>> _rootsanddirs(\
- [('rootfilesin', 'g/h', ''), ('rootfilesin', '', '')])
+ >>> _rootsanddirs(
+ ... [(b'rootfilesin', b'g/h', b''), (b'rootfilesin', b'', b'')])
([], ['g/h', '.', 'g', '.'])
- >>> _rootsanddirs(\
- [('relpath', 'r', ''), ('path', 'p/p', ''), ('path', '', '')])
+ >>> _rootsanddirs(
+ ... [(b'relpath', b'r', b''), (b'path', b'p/p', b''),
+ ... (b'path', b'', b'')])
(['r', 'p/p', '.'], ['p', '.'])
- >>> _rootsanddirs(\
- [('relglob', 'rg*', ''), ('re', 're/', ''), ('relre', 'rr', '')])
+ >>> _rootsanddirs(
+ ... [(b'relglob', b'rg*', b''), (b're', b're/', b''),
+ ... (b'relre', b'rr', b'')])
(['.', '.', '.'], ['.'])
'''
r, d = _patternrootsanddirs(kindpats)
@@ -934,9 +944,9 @@
def _explicitfiles(kindpats):
'''Returns the potential explicit filenames from the patterns.
- >>> _explicitfiles([('path', 'foo/bar', '')])
+ >>> _explicitfiles([(b'path', b'foo/bar', b'')])
['foo/bar']
- >>> _explicitfiles([('rootfilesin', 'foo/bar', '')])
+ >>> _explicitfiles([(b'rootfilesin', b'foo/bar', b'')])
[]
'''
# Keep only the pattern kinds where one can specify filenames (vs only
--- a/mercurial/mdiff.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/mdiff.py Sat Sep 30 07:52:48 2017 -0700
@@ -63,6 +63,7 @@
'index': 0,
'ignorews': False,
'ignorewsamount': False,
+ 'ignorewseol': False,
'ignoreblanklines': False,
'upgrade': False,
'showsimilarity': False,
@@ -97,6 +98,8 @@
text = bdiff.fixws(text, 0)
if blank and opts.ignoreblanklines:
text = re.sub('\n+', '\n', text).strip('\n')
+ if opts.ignorewseol:
+ text = re.sub(r'[ \t\r\f]+\n', r'\n', text)
return text
def splitblock(base1, lines1, base2, lines2, opts):
@@ -199,7 +202,7 @@
"""
if opts is None:
opts = defaultopts
- if opts.ignorews or opts.ignorewsamount:
+ if opts.ignorews or opts.ignorewsamount or opts.ignorewseol:
text1 = wsclean(opts, text1, False)
text2 = wsclean(opts, text2, False)
diff = bdiff.blocks(text1, text2)
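
ignorewseol is implemented as one more normalization pass in wsclean():
trailing blanks before each newline are stripped from both texts before bdiff
compares them. A tiny standalone check of that regex (the sample strings are
invented)::

    import re

    def stripwseol(text):
        # drop spaces, tabs, carriage returns and form feeds at end of line
        return re.sub(r'[ \t\r\f]+\n', '\n', text)

    old = 'def f():   \n    return 1\t\n'
    new = 'def f():\n    return 1\n'
    print(stripwseol(old) == stripwseol(new))   # True
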
--- a/mercurial/merge.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/merge.py Sat Sep 30 07:52:48 2017 -0700
@@ -495,12 +495,14 @@
f.close()
else:
wctx[dfile].remove(ignoremissing=True)
- complete, r, deleted = filemerge.premerge(self._repo, self._local,
- lfile, fcd, fco, fca,
+ complete, r, deleted = filemerge.premerge(self._repo, wctx,
+ self._local, lfile, fcd,
+ fco, fca,
labels=self._labels)
else:
- complete, r, deleted = filemerge.filemerge(self._repo, self._local,
- lfile, fcd, fco, fca,
+ complete, r, deleted = filemerge.filemerge(self._repo, wctx,
+ self._local, lfile, fcd,
+ fco, fca,
labels=self._labels)
if r is None:
# no real conflict
@@ -753,7 +755,7 @@
# check case-folding collision in provisional merged manifest
foldmap = {}
- for f in sorted(pmmf):
+ for f in pmmf:
fold = util.normcase(f)
if fold in foldmap:
raise error.Abort(_("case-folding collision between %s and %s")
@@ -988,7 +990,8 @@
else: # only when merge.preferancestor=* - the default
repo.ui.note(
_("note: merging %s and %s using bids from ancestors %s\n") %
- (wctx, mctx, _(' and ').join(str(anc) for anc in ancestors)))
+ (wctx, mctx, _(' and ').join(pycompat.bytestr(anc)
+ for anc in ancestors)))
# Call for bids
fbids = {} # mapping filename to bids (action method to list af actions)
@@ -1027,7 +1030,7 @@
# bids is a mapping from action method to list af actions
# Consensus?
if len(bids) == 1: # all bids are the same kind of method
- m, l = bids.items()[0]
+ m, l = list(bids.items())[0]
if all(a == l[0] for a in l[1:]): # len(bids) is > 1
repo.ui.note(_(" %s: consensus for %s\n") % (f, m))
actions[f] = l[0]
@@ -1053,7 +1056,7 @@
for _f, args, msg in l:
repo.ui.note(' %s -> %s\n' % (msg, m))
# Pick random action. TODO: Instead, prompt user when resolving
- m, l = bids.items()[0]
+ m, l = list(bids.items())[0]
repo.ui.warn(_(' %s: ambiguous merge - picked %s action\n') %
(f, m))
actions[f] = l[0]
@@ -1082,18 +1085,21 @@
return prunedactions, diverge, renamedelete
+def _getcwd():
+ try:
+ return pycompat.getcwd()
+ except OSError as err:
+ if err.errno == errno.ENOENT:
+ return None
+ raise
+
def batchremove(repo, wctx, actions):
"""apply removes to the working directory
yields tuples for progress updates
"""
verbose = repo.ui.verbose
- try:
- cwd = pycompat.getcwd()
- except OSError as err:
- if err.errno != errno.ENOENT:
- raise
- cwd = None
+ cwd = _getcwd()
i = 0
for f, args, msg in actions:
repo.ui.debug(" %s: %s -> r\n" % (f, msg))
@@ -1111,18 +1117,16 @@
i += 1
if i > 0:
yield i, f
- if cwd:
- # cwd was present before we started to remove files
- # let's check if it is present after we removed them
- try:
- pycompat.getcwd()
- except OSError as err:
- if err.errno != errno.ENOENT:
- raise
- # Print a warning if cwd was deleted
- repo.ui.warn(_("current directory was removed\n"
- "(consider changing to repo root: %s)\n") %
- repo.root)
+
+ if cwd and not _getcwd():
+ # cwd was removed in the course of removing files; print a helpful
+ # warning.
+ repo.ui.warn(_("current directory was removed\n"
+ "(consider changing to repo root: %s)\n") % repo.root)
+
+ # It's necessary to flush here in case we're inside a worker fork and will
+ # quit after this function.
+ wctx.flushall()
def batchget(repo, mctx, wctx, actions):
"""apply gets to the working directory
@@ -1150,9 +1154,7 @@
except OSError as e:
if e.errno != errno.ENOENT:
raise
-
- if repo.wvfs.isdir(f) and not repo.wvfs.islink(f):
- repo.wvfs.removedirs(f)
+ wctx[f].clearunknown()
wctx[f].write(fctx(f).data(), flags, backgroundclose=True)
if i == 100:
yield i, f
@@ -1161,6 +1163,10 @@
if i > 0:
yield i, f
+ # It's necessary to flush here in case we're inside a worker fork and will
+ # quit after this function.
+ wctx.flushall()
+
def applyupdates(repo, actions, wctx, mctx, overwrite, labels=None):
"""apply the merge action list to the working directory
@@ -1229,6 +1235,10 @@
progress(_updating, z, item=item, total=numupdates, unit=_files)
removed = len(actions['r'])
+ # We should flush before forking into worker processes, since those workers
+ # flush when they complete, and we don't want to duplicate work.
+ wctx.flushall()
+
# get in parallel
prog = worker.worker(repo.ui, 0.001, batchget, (repo, mctx, wctx),
actions['g'])
@@ -1470,7 +1480,7 @@
def update(repo, node, branchmerge, force, ancestor=None,
mergeancestor=False, labels=None, matcher=None, mergeforce=False,
- updatecheck=None):
+ updatecheck=None, wc=None):
"""
Perform a merge between the working directory and the given node
@@ -1518,6 +1528,9 @@
2 = abort: uncommitted changes (commit or update --clean to discard changes)
3 = abort: uncommitted changes (checked in commands.py)
+    The merge is performed inside ``wc``, a workingctx-like object. It defaults
+ to repo[None] if None is passed.
+
Return the same tuple as applyupdates().
"""
# Avoid cycle.
@@ -1541,7 +1554,8 @@
else:
partial = True
with repo.wlock():
- wc = repo[None]
+ if wc is None:
+ wc = repo[None]
pl = wc.parents()
p1 = pl[0]
pas = [None]
@@ -1700,6 +1714,7 @@
repo.vfs.write('updatestate', p2.hex())
stats = applyupdates(repo, actions, wc, p2, overwrite, labels=labels)
+ wc.flushall()
if not partial:
with repo.dirstate.parentchange():
--- a/mercurial/minirst.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/minirst.py Sat Sep 30 07:52:48 2017 -0700
@@ -46,13 +46,13 @@
'''
Apply a list of (find, replace) pairs to a text.
- >>> replace("foo bar", [('f', 'F'), ('b', 'B')])
+ >>> replace(b"foo bar", [(b'f', b'F'), (b'b', b'B')])
'Foo Bar'
- >>> encoding.encoding = 'latin1'
- >>> replace('\\x81\\\\', [('\\\\', '/')])
+ >>> encoding.encoding = b'latin1'
+ >>> replace(b'\\x81\\\\', [(b'\\\\', b'/')])
'\\x81/'
- >>> encoding.encoding = 'shiftjis'
- >>> replace('\\x81\\\\', [('\\\\', '/')])
+ >>> encoding.encoding = b'shiftjis'
+ >>> replace(b'\\x81\\\\', [(b'\\\\', b'/')])
'\\x81\\\\'
'''
--- a/mercurial/obsolete.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/obsolete.py Sat Sep 30 07:52:48 2017 -0700
@@ -20,12 +20,12 @@
besides old and news changeset identifiers, such as creation date or
author name.
-The old obsoleted changeset is called a "precursor" and possible
+The old obsoleted changeset is called a "predecessor" and possible
replacements are called "successors". Markers that used changeset X as
-a precursor are called "successor markers of X" because they hold
+a predecessor are called "successor markers of X" because they hold
information about the successors of X. Markers that use changeset Y as
-a successors are call "precursor markers of Y" because they hold
-information about the precursors of Y.
+a successor are called "predecessor markers of Y" because they hold
+information about the predecessors of Y.
Examples:
@@ -102,7 +102,7 @@
"""Returns True if the given repository has the given obsolete option
enabled.
"""
- result = set(repo.ui.configlist('experimental', 'evolution'))
+ result = set(repo.ui.configlist('experimental', 'stabilization'))
if 'all' in result:
return True
@@ -294,11 +294,11 @@
#
# - uint8: number of metadata entries M
#
-# - 20 or 32 bytes: precursor changeset identifier.
+# - 20 or 32 bytes: predecessor changeset identifier.
#
# - N*(20 or 32) bytes: successors changesets identifiers.
#
-# - P*(20 or 32) bytes: parents of the precursors changesets.
+# - P*(20 or 32) bytes: parents of the predecessors changesets.
#
# - M*(uint8, uint8): size of all metadata entries (key and value)
#
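As a rough, hypothetical sketch of how the fixed-width pieces described above fit together, the snippet below packs and unpacks the M*(uint8, uint8) metadata-size entries with the same 'BB' struct format the module uses; the helper names are illustrative only and are not part of this patch.

import struct

_metapair = struct.Struct('BB')   # (key size, value size), one pair per metadata entry

def packmetasizes(metadata):
    # metadata: sequence of (key, value) byte strings
    return b''.join(_metapair.pack(len(k), len(v)) for k, v in metadata)

def unpackmetasizes(data, off, nmeta):
    sizes = []
    for _ in range(nmeta):
        sizes.append(_metapair.unpack_from(data, off))
        off += _metapair.size
    return sizes, off

sizes, off = unpackmetasizes(packmetasizes([(b'user', b'alice')]), 0, 1)
assert sizes == [(4, 5)]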
@@ -314,7 +314,7 @@
_fm1parentshift = 14
_fm1parentmask = (_fm1parentnone << _fm1parentshift)
_fm1metapair = 'BB'
-_fm1metapairsize = _calcsize('BB')
+_fm1metapairsize = _calcsize(_fm1metapair)
def _fm1purereadmarkers(data, off, stop):
# make some global constants local for performance
@@ -470,11 +470,18 @@
for mark in markers:
successors.setdefault(mark[0], set()).add(mark)
+def _addprecursors(*args, **kwargs):
+ msg = ("'obsolete._addprecursors' is deprecated, "
+ "use 'obsolete._addpredecessors'")
+ util.nouideprecwarn(msg, '4.4')
+
+ return _addpredecessors(*args, **kwargs)
+
@util.nogc
-def _addprecursors(precursors, markers):
+def _addpredecessors(predecessors, markers):
for mark in markers:
for suc in mark[1]:
- precursors.setdefault(suc, set()).add(mark)
+ predecessors.setdefault(suc, set()).add(mark)
@util.nogc
def _addchildren(children, markers):
@@ -499,18 +506,18 @@
"""Store obsolete markers
Markers can be accessed with two mappings:
- - precursors[x] -> set(markers on precursors edges of x)
+ - predecessors[x] -> set(markers on predecessors edges of x)
- successors[x] -> set(markers on successors edges of x)
- - children[x] -> set(markers on precursors edges of children(x)
+ - children[x] -> set(markers on predecessors edges of children(x))
"""
fields = ('prec', 'succs', 'flag', 'meta', 'date', 'parents')
- # prec: nodeid, precursor changesets
+ # prec: nodeid, predecessor changesets
# succs: tuple of nodeid, successor changesets (0-N length)
# flag: integer, flag field carrying modifier for the markers (see doc)
# meta: binary blob, encoded metadata dictionary
# date: (float, int) tuple, date of marker creation
- # parents: (tuple of nodeid) or None, parents of precursors
+ # parents: (tuple of nodeid) or None, parents of predecessors
# None is used when no data has been recorded
def __init__(self, svfs, defaultformat=_fm1version, readonly=False):
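To make the mappings described in the obsstore docstring above concrete, here is a minimal, self-contained sketch (not the obsstore implementation) of how a single marker tuple populates the successors and predecessors indexes; the marker layout follows the 'fields' tuple above and the node names are made up.

# illustrative marker: (prec, succs, flag, meta, date, parents)
marker = (b'old-node', (b'new-node-1', b'new-node-2'), 0, (), (0.0, 0), None)

successors = {}    # successor markers of X: markers whose predecessor is X
predecessors = {}  # predecessor markers of Y: markers listing Y as a successor

successors.setdefault(marker[0], set()).add(marker)
for succ in marker[1]:
    predecessors.setdefault(succ, set()).add(marker)

assert marker in successors[b'old-node']
assert marker in predecessors[b'new-node-1']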
@@ -583,7 +590,7 @@
metadata = tuple(sorted(metadata.iteritems()))
- marker = (str(prec), tuple(succs), int(flag), metadata, date, parents)
+ marker = (bytes(prec), tuple(succs), int(flag), metadata, date, parents)
return bool(self.add(transaction, [marker]))
def add(self, transaction, markers):
@@ -658,11 +665,19 @@
_addsuccessors(successors, self._all)
return successors
- @propertycache
+ @property
def precursors(self):
- precursors = {}
- _addprecursors(precursors, self._all)
- return precursors
+ msg = ("'obsstore.precursors' is deprecated, "
+ "use 'obsstore.predecessors'")
+ util.nouideprecwarn(msg, '4.4')
+
+ return self.predecessors
+
+ @propertycache
+ def predecessors(self):
+ predecessors = {}
+ _addpredecessors(predecessors, self._all)
+ return predecessors
@propertycache
def children(self):
@@ -679,8 +694,8 @@
self._all.extend(markers)
if self._cached('successors'):
_addsuccessors(self.successors, markers)
- if self._cached('precursors'):
- _addprecursors(self.precursors, markers)
+ if self._cached('predecessors'):
+ _addpredecessors(self.predecessors, markers)
if self._cached('children'):
_addchildren(self.children, markers)
_checkinvalidmarkers(markers)
@@ -692,14 +707,15 @@
- marker that use this changeset as successor
- prune marker of direct children on this changeset
- - recursive application of the two rules on precursors of these markers
+ - recursive application of the two rules on predecessors of these
+ markers
It is a set so you cannot rely on order."""
pendingnodes = set(nodes)
seenmarkers = set()
seennodes = set(pendingnodes)
- precursorsmarkers = self.precursors
+ precursorsmarkers = self.predecessors
succsmarkers = self.successors
children = self.children
while pendingnodes:
@@ -892,6 +908,14 @@
@cachefor('unstable')
def _computeunstableset(repo):
+ msg = ("'unstable' volatile set is deprecated, "
+ "use 'orphan'")
+ repo.ui.deprecwarn(msg, '4.4')
+
+ return _computeorphanset(repo)
+
+@cachefor('orphan')
+def _computeorphanset(repo):
"""the set of non obsolete revisions with obsolete parents"""
pfunc = repo.changelog.parentrevs
mutable = _mutablerevs(repo)
@@ -910,7 +934,7 @@
@cachefor('suspended')
def _computesuspendedset(repo):
"""the set of obsolete parents with non obsolete descendants"""
- suspended = repo.changelog.ancestors(getrevs(repo, 'unstable'))
+ suspended = repo.changelog.ancestors(getrevs(repo, 'orphan'))
return set(r for r in getrevs(repo, 'obsolete') if r in suspended)
@cachefor('extinct')
@@ -918,9 +942,16 @@
"""the set of obsolete parents without non obsolete descendants"""
return getrevs(repo, 'obsolete') - getrevs(repo, 'suspended')
-
@cachefor('bumped')
def _computebumpedset(repo):
+ msg = ("'bumped' volatile set is deprecated, "
+ "use 'phasedivergent'")
+ repo.ui.deprecwarn(msg, '4.4')
+
+ return _computephasedivergentset(repo)
+
+@cachefor('phasedivergent')
+def _computephasedivergentset(repo):
"""the set of revs trying to obsolete public revisions"""
bumped = set()
# util function (avoid attribute lookup in the loop)
@@ -932,25 +963,33 @@
rev = ctx.rev()
# We only evaluate mutable, non-obsolete revision
node = ctx.node()
- # (future) A cache of precursors may worth if split is very common
- for pnode in obsutil.allprecursors(repo.obsstore, [node],
+ # (future) A cache of predecessors may be worth it if split is very common
+ for pnode in obsutil.allpredecessors(repo.obsstore, [node],
ignoreflags=bumpedfix):
prev = torev(pnode) # unfiltered! but so is phasecache
if (prev is not None) and (phase(repo, prev) <= public):
- # we have a public precursor
+ # we have a public predecessor
bumped.add(rev)
break # Next draft!
return bumped
@cachefor('divergent')
def _computedivergentset(repo):
+ msg = ("'divergent' volatile set is deprecated, "
+ "use 'contentdivergent'")
+ repo.ui.deprecwarn(msg, '4.4')
+
+ return _computecontentdivergentset(repo)
+
+@cachefor('contentdivergent')
+def _computecontentdivergentset(repo):
"""the set of rev that compete to be the final successors of some revision.
"""
divergent = set()
obsstore = repo.obsstore
newermap = {}
for ctx in repo.set('(not public()) - obsolete()'):
- mark = obsstore.precursors.get(ctx.node(), ())
+ mark = obsstore.predecessors.get(ctx.node(), ())
toprocess = set(mark)
seen = set()
while toprocess:
@@ -964,7 +1003,7 @@
if len(newer) > 1:
divergent.add(ctx.rev())
break
- toprocess.update(obsstore.precursors.get(prec, ()))
+ toprocess.update(obsstore.predecessors.get(prec, ()))
return divergent
@@ -991,7 +1030,7 @@
if 'user' not in metadata:
metadata['user'] = repo.ui.username()
useoperation = repo.ui.configbool('experimental',
- 'evolution.track-operation')
+ 'stabilization.track-operation')
if useoperation and operation:
metadata['operation'] = operation
tr = repo.transaction('add-obsolescence-marker')
--- a/mercurial/obsutil.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/obsutil.py Sat Sep 30 07:52:48 2017 -0700
@@ -9,6 +9,7 @@
from . import (
phases,
+ util
)
class marker(object):
@@ -29,7 +30,13 @@
return self._data == other._data
def precnode(self):
- """Precursor changeset node identifier"""
+ msg = ("'marker.precnode' is deprecated, "
+ "use 'marker.prednode'")
+ util.nouideprecwarn(msg, '4.4')
+ return self.prednode()
+
+ def prednode(self):
+ """Predecessor changeset node identifier"""
return self._data[0]
def succnodes(self):
@@ -37,7 +44,7 @@
return self._data[1]
def parentnodes(self):
- """Parents of the precursors (None if not recorded)"""
+ """Parents of the predecessors (None if not recorded)"""
return self._data[5]
def metadata(self):
@@ -74,7 +81,7 @@
considered missing.
"""
- precursors = repo.obsstore.precursors
+ precursors = repo.obsstore.predecessors
stack = [nodeid]
seen = set(stack)
@@ -95,7 +102,16 @@
else:
stack.append(precnodeid)
-def allprecursors(obsstore, nodes, ignoreflags=0):
+def allprecursors(*args, **kwargs):
+ """ (DEPRECATED)
+ """
+ msg = ("'obsutil.allprecursors' is deprecated, "
+ "use 'obsutil.allpredecessors'")
+ util.nouideprecwarn(msg, '4.4')
+
+ return allpredecessors(*args, **kwargs)
+
+def allpredecessors(obsstore, nodes, ignoreflags=0):
"""Yield node for every precursors of <nodes>.
Some precursors may be unknown locally.
@@ -108,7 +124,7 @@
while remaining:
current = remaining.pop()
yield current
- for mark in obsstore.precursors.get(current, ()):
+ for mark in obsstore.predecessors.get(current, ()):
# ignore marker flagged with specified flag
if mark[2] & ignoreflags:
continue
@@ -200,7 +216,7 @@
# shortcut to various useful item
nm = unfi.changelog.nodemap
- precursorsmarkers = unfi.obsstore.precursors
+ precursorsmarkers = unfi.obsstore.predecessors
successormarkers = unfi.obsstore.successors
childrenmarkers = unfi.obsstore.children
@@ -307,10 +323,30 @@
seenrevs.add(rev)
if phase(repo, rev) == public:
continue
- if set(succsmarkers(node)).issubset(addedmarkers):
+ if set(succsmarkers(node) or []).issubset(addedmarkers):
obsoleted.add(rev)
return obsoleted
+class _succs(list):
+ """small class to represent a successors with some metadata about it"""
+
+ def __init__(self, *args, **kwargs):
+ super(_succs, self).__init__(*args, **kwargs)
+ self.markers = set()
+
+ def copy(self):
+ new = _succs(self)
+ new.markers = self.markers.copy()
+ return new
+
+ @util.propertycache
+ def _set(self):
+ # immutable
+ return set(self)
+
+ def canmerge(self, other):
+ return self._set.issubset(other._set)
+
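Assuming the _succs class introduced above, a small usage sketch of the subset test that drives the de-duplication later in successorssets (node and marker names are made up):

small = _succs([b'a'])
small.markers.add(('marker-1',))
big = _succs([b'a', b'b'])
big.markers.add(('marker-2',))

if small.canmerge(big):          # small's nodes are a subset of big's
    big.markers.update(small.markers)
# big now carries both markers, and small can be dropped as a duplicate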
def successorssets(repo, initialnode, closest=False, cache=None):
"""Return set of all latest successors of initial nodes
@@ -429,7 +465,7 @@
# case (2): end of walk.
if current in repo:
# We have a valid successors.
- cache[current] = [(current,)]
+ cache[current] = [_succs((current,))]
else:
# Final obsolete version is unknown locally.
# Do not count that as a valid successors
@@ -505,13 +541,16 @@
succssets = []
for mark in sorted(succmarkers[current]):
# successors sets contributed by this marker
- markss = [[]]
+ base = _succs()
+ base.markers.add(mark)
+ markss = [base]
for suc in mark[1]:
# cardinal product with previous successors
productresult = []
for prefix in markss:
for suffix in cache[suc]:
- newss = list(prefix)
+ newss = prefix.copy()
+ newss.markers.update(suffix.markers)
for part in suffix:
# do not duplicated entry in successors set
# first entry wins.
@@ -523,15 +562,96 @@
# remove duplicated and subset
seen = []
final = []
- candidate = sorted(((set(s), s) for s in succssets if s),
- key=lambda x: len(x[1]), reverse=True)
- for setversion, listversion in candidate:
- for seenset in seen:
- if setversion.issubset(seenset):
+ candidates = sorted((s for s in succssets if s),
+ key=len, reverse=True)
+ for cand in candidates:
+ for seensuccs in seen:
+ if cand.canmerge(seensuccs):
+ seensuccs.markers.update(cand.markers)
break
else:
- final.append(listversion)
- seen.append(setversion)
+ final.append(cand)
+ seen.append(cand)
final.reverse() # put small successors set first
cache[current] = final
return cache[initialnode]
+
+def successorsandmarkers(repo, ctx):
+ """compute the raw data needed for computing obsfate
+ Returns a list of dict, one dict per successors set
+ """
+ if not ctx.obsolete():
+ return None
+
+ ssets = successorssets(repo, ctx.node(), closest=True)
+
+ # closestsuccessors returns an empty list for pruned revisions, remap it
+ # into a list containing an empty list for future processing
+ if ssets == []:
+ ssets = [[]]
+
+ # Try to recover pruned markers
+ succsmap = repo.obsstore.successors
+ fullsuccessorsets = [] # successor set + markers
+ for sset in ssets:
+ if sset:
+ fullsuccessorsets.append(sset)
+ else:
+ # successorssets returns an empty set() when ctx or one of its
+ # successors is pruned.
+ # In this case, walk the obs-markers tree again starting with ctx
+ # and find the relevant pruning obs-markers, the ones without
+ # successors.
+ # Having these markers allows us to compute some information about
+ # its fate, like who pruned this changeset and when.
+
+ # XXX we do not catch all prune markers (eg rewritten then pruned)
+ # (fix me later)
+ foundany = False
+ for mark in succsmap.get(ctx.node(), ()):
+ if not mark[1]:
+ foundany = True
+ sset = _succs()
+ sset.markers.add(mark)
+ fullsuccessorsets.append(sset)
+ if not foundany:
+ fullsuccessorsets.append(_succs())
+
+ values = []
+ for sset in fullsuccessorsets:
+ values.append({'successors': sset, 'markers': sset.markers})
+
+ return values
+
+def successorsetverb(successorset):
+ """ Return the verb summarizing the successorset
+ """
+ if not successorset:
+ verb = 'pruned'
+ elif len(successorset) == 1:
+ verb = 'rewritten'
+ else:
+ verb = 'split'
+ return verb
+
+def markersdates(markers):
+ """returns the list of dates for a list of markers
+ """
+ return [m[4] for m in markers]
+
+def markersusers(markers):
+ """ Returns a sorted list of markers users without duplicates
+ """
+ markersmeta = [dict(m[3]) for m in markers]
+ users = set(meta.get('user') for meta in markersmeta if meta.get('user'))
+
+ return sorted(users)
+
+def markersoperations(markers):
+ """ Returns a sorted list of markers operations without duplicates
+ """
+ markersmeta = [dict(m[3]) for m in markers]
+ operations = set(meta.get('operation') for meta in markersmeta
+ if meta.get('operation'))
+
+ return sorted(operations)
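A hedged sketch of how a caller could combine the obsfate helpers added above; this is illustrative wiring only, not code from this patch.

def describefate(repo, ctx):
    entries = successorsandmarkers(repo, ctx)
    if entries is None:          # ctx is not obsolete
        return []
    lines = []
    for entry in entries:
        verb = successorsetverb(entry['successors'])   # 'pruned', 'rewritten' or 'split'
        users = markersusers(entry['markers'])
        dates = markersdates(entry['markers'])
        operations = markersoperations(entry['markers'])
        lines.append((verb, users, operations, dates))
    return lines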
--- a/mercurial/parser.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/parser.py Sat Sep 30 07:52:48 2017 -0700
@@ -16,10 +16,11 @@
# an action is a tree node name, a tree label, and an optional match
# __call__(program) parses program into a labeled tree
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
from .i18n import _
from . import (
+ encoding,
error,
util,
)
@@ -96,15 +97,15 @@
def splitargspec(spec):
"""Parse spec of function arguments into (poskeys, varkey, keys, optkey)
- >>> splitargspec('')
+ >>> splitargspec(b'')
([], None, [], None)
- >>> splitargspec('foo bar')
+ >>> splitargspec(b'foo bar')
([], None, ['foo', 'bar'], None)
- >>> splitargspec('foo *bar baz **qux')
+ >>> splitargspec(b'foo *bar baz **qux')
(['foo'], 'bar', ['baz'], 'qux')
- >>> splitargspec('*foo')
+ >>> splitargspec(b'*foo')
([], 'foo', [], None)
- >>> splitargspec('**foo')
+ >>> splitargspec(b'**foo')
([], None, [], 'foo')
"""
optkey = None
@@ -193,9 +194,17 @@
# mangle Python's exception into our format
raise error.ParseError(str(e).lower())
+def _brepr(obj):
+ if isinstance(obj, bytes):
+ return b"'%s'" % util.escapestr(obj)
+ return encoding.strtolocal(repr(obj))
+
def _prettyformat(tree, leafnodes, level, lines):
- if not isinstance(tree, tuple) or tree[0] in leafnodes:
- lines.append((level, str(tree)))
+ if not isinstance(tree, tuple):
+ lines.append((level, _brepr(tree)))
+ elif tree[0] in leafnodes:
+ rs = map(_brepr, tree[1:])
+ lines.append((level, '(%s %s)' % (tree[0], ' '.join(rs))))
else:
lines.append((level, '(%s' % tree[0]))
for s in tree[1:]:
@@ -211,60 +220,62 @@
def simplifyinfixops(tree, targetnodes):
"""Flatten chained infix operations to reduce usage of Python stack
+ >>> from . import pycompat
>>> def f(tree):
- ... print prettyformat(simplifyinfixops(tree, ('or',)), ('symbol',))
- >>> f(('or',
- ... ('or',
- ... ('symbol', '1'),
- ... ('symbol', '2')),
- ... ('symbol', '3')))
+ ... s = prettyformat(simplifyinfixops(tree, (b'or',)), (b'symbol',))
+ ... print(pycompat.sysstr(s))
+ >>> f((b'or',
+ ... (b'or',
+ ... (b'symbol', b'1'),
+ ... (b'symbol', b'2')),
+ ... (b'symbol', b'3')))
(or
- ('symbol', '1')
- ('symbol', '2')
- ('symbol', '3'))
- >>> f(('func',
- ... ('symbol', 'p1'),
- ... ('or',
- ... ('or',
- ... ('func',
- ... ('symbol', 'sort'),
- ... ('list',
- ... ('or',
- ... ('or',
- ... ('symbol', '1'),
- ... ('symbol', '2')),
- ... ('symbol', '3')),
- ... ('negate',
- ... ('symbol', 'rev')))),
- ... ('and',
- ... ('symbol', '4'),
- ... ('group',
- ... ('or',
- ... ('or',
- ... ('symbol', '5'),
- ... ('symbol', '6')),
- ... ('symbol', '7'))))),
- ... ('symbol', '8'))))
+ (symbol '1')
+ (symbol '2')
+ (symbol '3'))
+ >>> f((b'func',
+ ... (b'symbol', b'p1'),
+ ... (b'or',
+ ... (b'or',
+ ... (b'func',
+ ... (b'symbol', b'sort'),
+ ... (b'list',
+ ... (b'or',
+ ... (b'or',
+ ... (b'symbol', b'1'),
+ ... (b'symbol', b'2')),
+ ... (b'symbol', b'3')),
+ ... (b'negate',
+ ... (b'symbol', b'rev')))),
+ ... (b'and',
+ ... (b'symbol', b'4'),
+ ... (b'group',
+ ... (b'or',
+ ... (b'or',
+ ... (b'symbol', b'5'),
+ ... (b'symbol', b'6')),
+ ... (b'symbol', b'7'))))),
+ ... (b'symbol', b'8'))))
(func
- ('symbol', 'p1')
+ (symbol 'p1')
(or
(func
- ('symbol', 'sort')
+ (symbol 'sort')
(list
(or
- ('symbol', '1')
- ('symbol', '2')
- ('symbol', '3'))
+ (symbol '1')
+ (symbol '2')
+ (symbol '3'))
(negate
- ('symbol', 'rev'))))
+ (symbol 'rev'))))
(and
- ('symbol', '4')
+ (symbol '4')
(group
(or
- ('symbol', '5')
- ('symbol', '6')
- ('symbol', '7'))))
- ('symbol', '8')))
+ (symbol '5')
+ (symbol '6')
+ (symbol '7'))))
+ (symbol '8')))
"""
if not isinstance(tree, tuple):
return tree
@@ -285,6 +296,86 @@
simplified.append(op)
return tuple(reversed(simplified))
+def _buildtree(template, placeholder, replstack):
+ if template == placeholder:
+ return replstack.pop()
+ if not isinstance(template, tuple):
+ return template
+ return tuple(_buildtree(x, placeholder, replstack) for x in template)
+
+def buildtree(template, placeholder, *repls):
+ """Create new tree by substituting placeholders by replacements
+
+ >>> _ = (b'symbol', b'_')
+ >>> def f(template, *repls):
+ ... return buildtree(template, _, *repls)
+ >>> f((b'func', (b'symbol', b'only'), (b'list', _, _)),
+ ... ('symbol', '1'), ('symbol', '2'))
+ ('func', ('symbol', 'only'), ('list', ('symbol', '1'), ('symbol', '2')))
+ >>> f((b'and', _, (b'not', _)), (b'symbol', b'1'), (b'symbol', b'2'))
+ ('and', ('symbol', '1'), ('not', ('symbol', '2')))
+ """
+ if not isinstance(placeholder, tuple):
+ raise error.ProgrammingError('placeholder must be a node tuple')
+ replstack = list(reversed(repls))
+ r = _buildtree(template, placeholder, replstack)
+ if replstack:
+ raise error.ProgrammingError('too many replacements')
+ return r
+
+def _matchtree(pattern, tree, placeholder, incompletenodes, matches):
+ if pattern == tree:
+ return True
+ if not isinstance(pattern, tuple) or not isinstance(tree, tuple):
+ return False
+ if pattern == placeholder and tree[0] not in incompletenodes:
+ matches.append(tree)
+ return True
+ if len(pattern) != len(tree):
+ return False
+ return all(_matchtree(p, x, placeholder, incompletenodes, matches)
+ for p, x in zip(pattern, tree))
+
+def matchtree(pattern, tree, placeholder=None, incompletenodes=()):
+ """If a tree matches the pattern, return a list of the tree and nodes
+ matched with the placeholder; Otherwise None
+
+ >>> def f(pattern, tree):
+ ... m = matchtree(pattern, tree, _, {b'keyvalue', b'list'})
+ ... if m:
+ ... return m[1:]
+
+ >>> _ = (b'symbol', b'_')
+ >>> f((b'func', (b'symbol', b'ancestors'), _),
+ ... (b'func', (b'symbol', b'ancestors'), (b'symbol', b'1')))
+ [('symbol', '1')]
+ >>> f((b'func', (b'symbol', b'ancestors'), _),
+ ... (b'func', (b'symbol', b'ancestors'), None))
+ >>> f((b'range', (b'dagrange', _, _), _),
+ ... (b'range',
+ ... (b'dagrange', (b'symbol', b'1'), (b'symbol', b'2')),
+ ... (b'symbol', b'3')))
+ [('symbol', '1'), ('symbol', '2'), ('symbol', '3')]
+
+ The placeholder does not match the specified incomplete nodes because
+ an incomplete node (e.g. argument list) cannot construct an expression.
+
+ >>> f((b'func', (b'symbol', b'ancestors'), _),
+ ... (b'func', (b'symbol', b'ancestors'),
+ ... (b'list', (b'symbol', b'1'), (b'symbol', b'2'))))
+
+ The placeholder may be omitted, in which case it shouldn't match a None node.
+
+ >>> _ = None
+ >>> f((b'func', (b'symbol', b'ancestors'), None),
+ ... (b'func', (b'symbol', b'ancestors'), (b'symbol', b'0')))
+ """
+ if placeholder is not None and not isinstance(placeholder, tuple):
+ raise error.ProgrammingError('placeholder must be a node tuple')
+ matches = [tree]
+ if _matchtree(pattern, tree, placeholder, incompletenodes, matches):
+ return matches
+
def parseerrordetail(inst):
"""Compose error message from specified ParseError object
"""
@@ -347,27 +438,27 @@
- ``args``: list of argument names (or None for symbol declaration)
- ``errorstr``: detail about detected error (or None)
- >>> sym = lambda x: ('symbol', x)
- >>> symlist = lambda *xs: ('list',) + tuple(sym(x) for x in xs)
- >>> func = lambda n, a: ('func', sym(n), a)
+ >>> sym = lambda x: (b'symbol', x)
+ >>> symlist = lambda *xs: (b'list',) + tuple(sym(x) for x in xs)
+ >>> func = lambda n, a: (b'func', sym(n), a)
>>> parsemap = {
- ... 'foo': sym('foo'),
- ... '$foo': sym('$foo'),
- ... 'foo::bar': ('dagrange', sym('foo'), sym('bar')),
- ... 'foo()': func('foo', None),
- ... '$foo()': func('$foo', None),
- ... 'foo($1, $2)': func('foo', symlist('$1', '$2')),
- ... 'foo(bar_bar, baz.baz)':
- ... func('foo', symlist('bar_bar', 'baz.baz')),
- ... 'foo(bar($1, $2))':
- ... func('foo', func('bar', symlist('$1', '$2'))),
- ... 'foo($1, $2, nested($1, $2))':
- ... func('foo', (symlist('$1', '$2') +
- ... (func('nested', symlist('$1', '$2')),))),
- ... 'foo("bar")': func('foo', ('string', 'bar')),
- ... 'foo($1, $2': error.ParseError('unexpected token: end', 10),
- ... 'foo("bar': error.ParseError('unterminated string', 5),
- ... 'foo($1, $2, $1)': func('foo', symlist('$1', '$2', '$1')),
+ ... b'foo': sym(b'foo'),
+ ... b'$foo': sym(b'$foo'),
+ ... b'foo::bar': (b'dagrange', sym(b'foo'), sym(b'bar')),
+ ... b'foo()': func(b'foo', None),
+ ... b'$foo()': func(b'$foo', None),
+ ... b'foo($1, $2)': func(b'foo', symlist(b'$1', b'$2')),
+ ... b'foo(bar_bar, baz.baz)':
+ ... func(b'foo', symlist(b'bar_bar', b'baz.baz')),
+ ... b'foo(bar($1, $2))':
+ ... func(b'foo', func(b'bar', symlist(b'$1', b'$2'))),
+ ... b'foo($1, $2, nested($1, $2))':
+ ... func(b'foo', (symlist(b'$1', b'$2') +
+ ... (func(b'nested', symlist(b'$1', b'$2')),))),
+ ... b'foo("bar")': func(b'foo', (b'string', b'bar')),
+ ... b'foo($1, $2': error.ParseError(b'unexpected token: end', 10),
+ ... b'foo("bar': error.ParseError(b'unterminated string', 5),
+ ... b'foo($1, $2, $1)': func(b'foo', symlist(b'$1', b'$2', b'$1')),
... }
>>> def parse(expr):
... x = parsemap[expr]
@@ -375,42 +466,42 @@
... raise x
... return x
>>> def trygetfunc(tree):
- ... if not tree or tree[0] != 'func' or tree[1][0] != 'symbol':
+ ... if not tree or tree[0] != b'func' or tree[1][0] != b'symbol':
... return None
... if not tree[2]:
... return tree[1][1], []
- ... if tree[2][0] == 'list':
+ ... if tree[2][0] == b'list':
... return tree[1][1], list(tree[2][1:])
... return tree[1][1], [tree[2]]
>>> class aliasrules(basealiasrules):
... _parse = staticmethod(parse)
... _trygetfunc = staticmethod(trygetfunc)
>>> builddecl = aliasrules._builddecl
- >>> builddecl('foo')
+ >>> builddecl(b'foo')
('foo', None, None)
- >>> builddecl('$foo')
+ >>> builddecl(b'$foo')
('$foo', None, "invalid symbol '$foo'")
- >>> builddecl('foo::bar')
+ >>> builddecl(b'foo::bar')
('foo::bar', None, 'invalid format')
- >>> builddecl('foo()')
+ >>> builddecl(b'foo()')
('foo', [], None)
- >>> builddecl('$foo()')
+ >>> builddecl(b'$foo()')
('$foo()', None, "invalid function '$foo'")
- >>> builddecl('foo($1, $2)')
+ >>> builddecl(b'foo($1, $2)')
('foo', ['$1', '$2'], None)
- >>> builddecl('foo(bar_bar, baz.baz)')
+ >>> builddecl(b'foo(bar_bar, baz.baz)')
('foo', ['bar_bar', 'baz.baz'], None)
- >>> builddecl('foo($1, $2, nested($1, $2))')
+ >>> builddecl(b'foo($1, $2, nested($1, $2))')
('foo($1, $2, nested($1, $2))', None, 'invalid argument list')
- >>> builddecl('foo(bar($1, $2))')
+ >>> builddecl(b'foo(bar($1, $2))')
('foo(bar($1, $2))', None, 'invalid argument list')
- >>> builddecl('foo("bar")')
+ >>> builddecl(b'foo("bar")')
('foo("bar")', None, 'invalid argument list')
- >>> builddecl('foo($1, $2')
+ >>> builddecl(b'foo($1, $2')
('foo($1, $2', None, 'at 10: unexpected token: end')
- >>> builddecl('foo("bar')
+ >>> builddecl(b'foo("bar')
('foo("bar', None, 'at 5: unterminated string')
- >>> builddecl('foo($1, $2, $1)')
+ >>> builddecl(b'foo($1, $2, $1)')
('foo', None, 'argument names collide with each other')
"""
try:
@@ -466,37 +557,42 @@
``args`` is a list of alias argument names, or None if the alias
is declared as a symbol.
+ >>> from . import pycompat
>>> parsemap = {
- ... '$1 or foo': ('or', ('symbol', '$1'), ('symbol', 'foo')),
- ... '$1 or $bar': ('or', ('symbol', '$1'), ('symbol', '$bar')),
- ... '$10 or baz': ('or', ('symbol', '$10'), ('symbol', 'baz')),
- ... '"$1" or "foo"': ('or', ('string', '$1'), ('string', 'foo')),
+ ... b'$1 or foo': (b'or', (b'symbol', b'$1'), (b'symbol', b'foo')),
+ ... b'$1 or $bar':
+ ... (b'or', (b'symbol', b'$1'), (b'symbol', b'$bar')),
+ ... b'$10 or baz':
+ ... (b'or', (b'symbol', b'$10'), (b'symbol', b'baz')),
+ ... b'"$1" or "foo"':
+ ... (b'or', (b'string', b'$1'), (b'string', b'foo')),
... }
>>> class aliasrules(basealiasrules):
... _parse = staticmethod(parsemap.__getitem__)
... _trygetfunc = staticmethod(lambda x: None)
>>> builddefn = aliasrules._builddefn
>>> def pprint(tree):
- ... print prettyformat(tree, ('_aliasarg', 'string', 'symbol'))
- >>> args = ['$1', '$2', 'foo']
- >>> pprint(builddefn('$1 or foo', args))
+ ... s = prettyformat(tree, (b'_aliasarg', b'string', b'symbol'))
+ ... print(pycompat.sysstr(s))
+ >>> args = [b'$1', b'$2', b'foo']
+ >>> pprint(builddefn(b'$1 or foo', args))
(or
- ('_aliasarg', '$1')
- ('_aliasarg', 'foo'))
+ (_aliasarg '$1')
+ (_aliasarg 'foo'))
>>> try:
- ... builddefn('$1 or $bar', args)
+ ... builddefn(b'$1 or $bar', args)
... except error.ParseError as inst:
- ... print parseerrordetail(inst)
+ ... print(pycompat.sysstr(parseerrordetail(inst)))
invalid symbol '$bar'
- >>> args = ['$1', '$10', 'foo']
- >>> pprint(builddefn('$10 or baz', args))
+ >>> args = [b'$1', b'$10', b'foo']
+ >>> pprint(builddefn(b'$10 or baz', args))
(or
- ('_aliasarg', '$10')
- ('symbol', 'baz'))
- >>> pprint(builddefn('"$1" or "foo"', args))
+ (_aliasarg '$10')
+ (symbol 'baz'))
+ >>> pprint(builddefn(b'"$1" or "foo"', args))
(or
- ('string', '$1')
- ('string', 'foo'))
+ (string '$1')
+ (string 'foo'))
"""
tree = cls._parse(defn)
if args:
--- a/mercurial/patch.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/patch.py Sat Sep 30 07:52:48 2017 -0700
@@ -6,7 +6,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
import collections
import copy
@@ -46,9 +46,7 @@
gitre = re.compile(br'diff --git a/(.*) b/(.*)')
tabsplitter = re.compile(br'(\t+|[^\t]+)')
-class PatchError(Exception):
- pass
-
+PatchError = error.PatchError
# public functions
@@ -205,10 +203,11 @@
# attempt to detect the start of a patch
# (this heuristic is borrowed from quilt)
- diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
- r'retrieving revision [0-9]+(\.[0-9]+)*$|'
- r'---[ \t].*?^\+\+\+[ \t]|'
- r'\*\*\*[ \t].*?^---[ \t])', re.MULTILINE|re.DOTALL)
+ diffre = re.compile(br'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
+ br'retrieving revision [0-9]+(\.[0-9]+)*$|'
+ br'---[ \t].*?^\+\+\+[ \t]|'
+ br'\*\*\*[ \t].*?^---[ \t])',
+ re.MULTILINE | re.DOTALL)
data = {}
fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
@@ -230,7 +229,7 @@
pend = subject.find(']')
if pend >= 0:
subject = subject[pend + 1:].lstrip()
- subject = re.sub(r'\n[ \t]+', ' ', subject)
+ subject = re.sub(br'\n[ \t]+', ' ', subject)
ui.debug('Subject: %s\n' % subject)
if data['user']:
ui.debug('From: %s\n' % data['user'])
@@ -961,8 +960,8 @@
def countchanges(self, hunk):
"""hunk -> (n+,n-)"""
- add = len([h for h in hunk if h[0] == '+'])
- rem = len([h for h in hunk if h[0] == '-'])
+ add = len([h for h in hunk if h.startswith('+')])
+ rem = len([h for h in hunk if h.startswith('-')])
return add, rem
def reversehunk(self):
@@ -973,7 +972,7 @@
unchanged.
"""
m = {'+': '-', '-': '+', '\\': '\\'}
- hunk = ['%s%s' % (m[l[0]], l[1:]) for l in self.hunk]
+ hunk = ['%s%s' % (m[l[0:1]], l[1:]) for l in self.hunk]
return recordhunk(self.header, self.toline, self.fromline, self.proc,
self.before, hunk, self.after)
@@ -996,54 +995,55 @@
def __repr__(self):
return '<hunk %r@%d>' % (self.filename(), self.fromline)
+messages = {
+ 'multiple': {
+ 'discard': _("discard change %d/%d to '%s'?"),
+ 'record': _("record change %d/%d to '%s'?"),
+ 'revert': _("revert change %d/%d to '%s'?"),
+ },
+ 'single': {
+ 'discard': _("discard this change to '%s'?"),
+ 'record': _("record this change to '%s'?"),
+ 'revert': _("revert this change to '%s'?"),
+ },
+ 'help': {
+ 'discard': _('[Ynesfdaq?]'
+ '$$ &Yes, discard this change'
+ '$$ &No, skip this change'
+ '$$ &Edit this change manually'
+ '$$ &Skip remaining changes to this file'
+ '$$ Discard remaining changes to this &file'
+ '$$ &Done, skip remaining changes and files'
+ '$$ Discard &all changes to all remaining files'
+ '$$ &Quit, discarding no changes'
+ '$$ &? (display help)'),
+ 'record': _('[Ynesfdaq?]'
+ '$$ &Yes, record this change'
+ '$$ &No, skip this change'
+ '$$ &Edit this change manually'
+ '$$ &Skip remaining changes to this file'
+ '$$ Record remaining changes to this &file'
+ '$$ &Done, skip remaining changes and files'
+ '$$ Record &all changes to all remaining files'
+ '$$ &Quit, recording no changes'
+ '$$ &? (display help)'),
+ 'revert': _('[Ynesfdaq?]'
+ '$$ &Yes, revert this change'
+ '$$ &No, skip this change'
+ '$$ &Edit this change manually'
+ '$$ &Skip remaining changes to this file'
+ '$$ Revert remaining changes to this &file'
+ '$$ &Done, skip remaining changes and files'
+ '$$ Revert &all changes to all remaining files'
+ '$$ &Quit, reverting no changes'
+ '$$ &? (display help)')
+ }
+}
+
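With the prompt table hoisted to module level as above, callers index it first by kind and then by operation; an illustrative lookup (values shown in comments):

# illustrative only; 'record' is one of the supported operations
operation = 'record'
single = messages['single'][operation] % 'foo.txt'
# -> "record this change to 'foo.txt'?"
multiple = messages['multiple'][operation] % (2, 5, 'foo.txt')
# -> "record change 2/5 to 'foo.txt'?"
helptext = messages['help'][operation]   # the [Ynesfdaq?] choices string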
def filterpatch(ui, headers, operation=None):
"""Interactively filter patch chunks into applied-only chunks"""
if operation is None:
operation = 'record'
- messages = {
- 'multiple': {
- 'discard': _("discard change %d/%d to '%s'?"),
- 'record': _("record change %d/%d to '%s'?"),
- 'revert': _("revert change %d/%d to '%s'?"),
- }[operation],
- 'single': {
- 'discard': _("discard this change to '%s'?"),
- 'record': _("record this change to '%s'?"),
- 'revert': _("revert this change to '%s'?"),
- }[operation],
- 'help': {
- 'discard': _('[Ynesfdaq?]'
- '$$ &Yes, discard this change'
- '$$ &No, skip this change'
- '$$ &Edit this change manually'
- '$$ &Skip remaining changes to this file'
- '$$ Discard remaining changes to this &file'
- '$$ &Done, skip remaining changes and files'
- '$$ Discard &all changes to all remaining files'
- '$$ &Quit, discarding no changes'
- '$$ &? (display help)'),
- 'record': _('[Ynesfdaq?]'
- '$$ &Yes, record this change'
- '$$ &No, skip this change'
- '$$ &Edit this change manually'
- '$$ &Skip remaining changes to this file'
- '$$ Record remaining changes to this &file'
- '$$ &Done, skip remaining changes and files'
- '$$ Record &all changes to all remaining files'
- '$$ &Quit, recording no changes'
- '$$ &? (display help)'),
- 'revert': _('[Ynesfdaq?]'
- '$$ &Yes, revert this change'
- '$$ &No, skip this change'
- '$$ &Edit this change manually'
- '$$ &Skip remaining changes to this file'
- '$$ Revert remaining changes to this &file'
- '$$ &Done, skip remaining changes and files'
- '$$ Revert &all changes to all remaining files'
- '$$ &Quit, reverting no changes'
- '$$ &? (display help)')
- }[operation]
- }
def prompt(skipfile, skipall, query, chunk):
"""prompt query, and process base inputs
@@ -1061,7 +1061,7 @@
if skipfile is not None:
return skipfile, skipfile, skipall, newpatches
while True:
- resps = messages['help']
+ resps = messages['help'][operation]
r = ui.promptchoice("%s %s" % (query, resps))
ui.write("\n")
if r == 8: # ?
@@ -1166,10 +1166,11 @@
if skipfile is None and skipall is None:
chunk.pretty(ui)
if total == 1:
- msg = messages['single'] % chunk.filename()
+ msg = messages['single'][operation] % chunk.filename()
else:
idx = pos - len(h.hunks) + i
- msg = messages['multiple'] % (idx, total, chunk.filename())
+ msg = messages['multiple'][operation] % (idx, total,
+ chunk.filename())
r, skipfile, skipall, newpatches = prompt(skipfile,
skipall, msg, chunk)
if r:
@@ -1476,7 +1477,7 @@
This function operates on hunks coming out of patch.filterpatch, that is
a list of the form: [header1, hunk1, hunk2, header2...]. Example usage:
- >>> rawpatch = """diff --git a/folder1/g b/folder1/g
+ >>> rawpatch = b"""diff --git a/folder1/g b/folder1/g
... --- a/folder1/g
... +++ b/folder1/g
... @@ -1,7 +1,7 @@
@@ -1489,7 +1490,7 @@
... 5
... d
... +lastline"""
- >>> hunks = parsepatch(rawpatch)
+ >>> hunks = parsepatch([rawpatch])
>>> hunkscomingfromfilterpatch = []
>>> for h in hunks:
... hunkscomingfromfilterpatch.append(h)
@@ -1500,9 +1501,9 @@
>>> fp = util.stringio()
>>> for c in reversedhunks:
... c.write(fp)
- >>> fp.seek(0)
+ >>> fp.seek(0) or None
>>> reversedpatch = fp.read()
- >>> print reversedpatch
+ >>> print(pycompat.sysstr(reversedpatch))
diff --git a/folder1/g b/folder1/g
--- a/folder1/g
+++ b/folder1/g
@@ -1538,7 +1539,7 @@
If maxcontext is not None, trim context lines if necessary.
- >>> rawpatch = '''diff --git a/folder1/g b/folder1/g
+ >>> rawpatch = b'''diff --git a/folder1/g b/folder1/g
... --- a/folder1/g
... +++ b/folder1/g
... @@ -1,8 +1,10 @@
@@ -1559,7 +1560,7 @@
... header.write(out)
... for hunk in header.hunks:
... hunk.write(out)
- >>> print(out.getvalue())
+ >>> print(pycompat.sysstr(out.getvalue()))
diff --git a/folder1/g b/folder1/g
--- a/folder1/g
+++ b/folder1/g
@@ -1664,17 +1665,17 @@
Returns (stripped components, path in repository).
- >>> pathtransform('a/b/c', 0, '')
+ >>> pathtransform(b'a/b/c', 0, b'')
('', 'a/b/c')
- >>> pathtransform(' a/b/c ', 0, '')
+ >>> pathtransform(b' a/b/c ', 0, b'')
('', ' a/b/c')
- >>> pathtransform(' a/b/c ', 2, '')
+ >>> pathtransform(b' a/b/c ', 2, b'')
('a/b/', 'c')
- >>> pathtransform('a/b/c', 0, 'd/e/')
+ >>> pathtransform(b'a/b/c', 0, b'd/e/')
('', 'd/e/a/b/c')
- >>> pathtransform(' a//b/c ', 2, 'd/e/')
+ >>> pathtransform(b' a//b/c ', 2, b'd/e/')
('a//b/', 'd/e/c')
- >>> pathtransform('a/b/c', 3, '')
+ >>> pathtransform(b'a/b/c', 3, b'')
Traceback (most recent call last):
PatchError: unable to strip away 1 of 3 dirs from a/b/c
'''
@@ -1690,7 +1691,7 @@
(count, strip, path))
i += 1
# consume '//' in the path
- while i < pathlen - 1 and path[i] == '/':
+ while i < pathlen - 1 and path[i:i + 1] == '/':
i += 1
count -= 1
return path[:i].lstrip(), prefix + path[i:].rstrip()
@@ -1758,7 +1759,7 @@
- ('hunk', [hunk_lines])
- ('range', (-start,len, +start,len, proc))
"""
- lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
+ lines_re = re.compile(br'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
lr = linereader(fp)
def scanwhile(first, p):
@@ -1785,7 +1786,7 @@
else:
lr.push(fromfile)
yield 'file', header
- elif line[0] == ' ':
+ elif line[0:1] == ' ':
yield 'context', scanwhile(line, lambda l: l[0] in ' \\')
elif line[0] in '-+':
yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\')
@@ -2282,6 +2283,7 @@
'ignorewsamount')
buildopts['ignoreblanklines'] = get('ignore_blank_lines',
'ignoreblanklines')
+ buildopts['ignorewseol'] = get('ignore_space_at_eol', 'ignorewseol')
if formatchanging:
buildopts['text'] = opts and opts.get('text')
binary = None if opts is None else opts.get('binary')
--- a/mercurial/pathutil.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/pathutil.py Sat Sep 30 07:52:48 2017 -0700
@@ -203,9 +203,9 @@
See also issue3033 for detail about need of this function.
- >>> normasprefix('/foo/bar').replace(os.sep, '/')
+ >>> normasprefix(b'/foo/bar').replace(pycompat.ossep, b'/')
'/foo/bar/'
- >>> normasprefix('/').replace(os.sep, '/')
+ >>> normasprefix(b'/').replace(pycompat.ossep, b'/')
'/'
'''
d, p = os.path.splitdrive(path)
--- a/mercurial/peer.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/peer.py Sat Sep 30 07:52:48 2017 -0700
@@ -8,7 +8,6 @@
from __future__ import absolute_import
-from .i18n import _
from . import (
error,
util,
@@ -49,15 +48,6 @@
def results(self):
raise NotImplementedError()
-class localbatch(batcher):
- '''performs the queued calls directly'''
- def __init__(self, local):
- batcher.__init__(self)
- self.local = local
- def submit(self):
- for name, args, opts, resref in self.calls:
- resref.set(getattr(self.local, name)(*args, **opts))
-
class localiterbatcher(iterbatcher):
def __init__(self, local):
super(iterbatcher, self).__init__()
@@ -69,7 +59,8 @@
def results(self):
for name, args, opts, resref in self.calls:
- yield getattr(self.local, name)(*args, **opts)
+ resref.set(getattr(self.local, name)(*args, **opts))
+ yield resref.value
def batchable(f):
'''annotation for batchable methods
@@ -78,9 +69,6 @@
@batchable
def sample(self, one, two=None):
- # Handle locally computable results first:
- if not one:
- yield "a local result", None
# Build list of encoded arguments suitable for your wire protocol:
encargs = [('one', encode(one),), ('two', encode(two),)]
# Create future for injection of encoded result:
@@ -106,50 +94,3 @@
return next(batchable)
setattr(plain, 'batchable', f)
return plain
-
-class peerrepository(object):
-
- def batch(self):
- return localbatch(self)
-
- def iterbatch(self):
- """Batch requests but allow iterating over the results.
-
- This is to allow interleaving responses with things like
- progress updates for clients.
- """
- return localiterbatcher(self)
-
- def capable(self, name):
- '''tell whether repo supports named capability.
- return False if not supported.
- if boolean capability, return True.
- if string capability, return string.'''
- caps = self._capabilities()
- if name in caps:
- return True
- name_eq = name + '='
- for cap in caps:
- if cap.startswith(name_eq):
- return cap[len(name_eq):]
- return False
-
- def requirecap(self, name, purpose):
- '''raise an exception if the given capability is not present'''
- if not self.capable(name):
- raise error.CapabilityError(
- _('cannot %s; remote repository does not '
- 'support the %r capability') % (purpose, name))
-
- def local(self):
- '''return peer as a localrepo, or None'''
- return None
-
- def peer(self):
- return self
-
- def canpush(self):
- return True
-
- def close(self):
- pass
--- a/mercurial/phases.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/phases.py Sat Sep 30 07:52:48 2017 -0700
@@ -103,6 +103,7 @@
from __future__ import absolute_import
import errno
+import struct
from .i18n import _
from .node import (
@@ -119,6 +120,8 @@
util,
)
+_fphasesentry = struct.Struct('>i20s')
+
allphases = public, draft, secret = range(3)
trackedphases = allphases[1:]
phasenames = ['public', 'draft', 'secret']
@@ -154,6 +157,34 @@
dirty = True
return roots, dirty
+def binaryencode(phasemapping):
+ """encode a 'phase -> nodes' mapping into a binary stream
+
+ Since phases are integers, the mapping is actually a python list:
+ [[PUBLIC_HEADS], [DRAFTS_HEADS], [SECRET_HEADS]]
+ """
+ binarydata = []
+ for phase, nodes in enumerate(phasemapping):
+ for head in nodes:
+ binarydata.append(_fphasesentry.pack(phase, head))
+ return ''.join(binarydata)
+
+def binarydecode(stream):
+ """decode a binary stream into a 'phase -> nodes' mapping
+
+ Since phases are integers, the mapping is actually a python list."""
+ headsbyphase = [[] for i in allphases]
+ entrysize = _fphasesentry.size
+ while True:
+ entry = stream.read(entrysize)
+ if len(entry) < entrysize:
+ if entry:
+ raise error.Abort(_('bad phase-heads stream'))
+ break
+ phase, node = _fphasesentry.unpack(entry)
+ headsbyphase[phase].append(node)
+ return headsbyphase
+
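A standalone, hedged sketch of the fixed-width format assumed above (a big-endian 32-bit phase followed by a 20-byte node per entry), showing a round-trip outside of Mercurial; the node value is made up.

import struct
from io import BytesIO

entry = struct.Struct('>i20s')          # 4-byte phase + 20-byte node

def encode(headsbyphase):
    # headsbyphase: list indexed by phase number, each item a list of 20-byte nodes
    return b''.join(entry.pack(phase, node)
                    for phase, nodes in enumerate(headsbyphase)
                    for node in nodes)

def decode(data, nphases=3):
    heads = [[] for _ in range(nphases)]
    stream = BytesIO(data)
    while True:
        chunk = stream.read(entry.size)
        if not chunk:
            break
        phase, node = entry.unpack(chunk)
        heads[phase].append(node)
    return heads

nodes = [[], [b'\x11' * 20], []]        # one draft head
assert decode(encode(nodes)) == nodes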
def _trackphasechange(data, rev, old, new):
"""add a phase move the <data> dictionnary
@@ -527,11 +558,18 @@
headsbyphase[phase] = [cl.node(r) for r in repo.revs(revset, subset)]
return headsbyphase
-def updatephases(repo, tr, headsbyphase):
+def updatephases(repo, trgetter, headsbyphase):
"""Updates the repo with the given phase heads"""
# Now advance phase boundaries of all but secret phase
+ #
+ # run the update (and fetch the transaction) only if there are actually things
+ # to update. This avoids creating an empty transaction during no-op operations.
+
for phase in allphases[:-1]:
- advanceboundary(repo, tr, phase, headsbyphase[phase])
+ revset = '%%ln - %s()' % phasenames[phase]
+ heads = [c.node() for c in repo.set(revset, headsbyphase[phase])]
+ if heads:
+ advanceboundary(repo, trgetter(), phase, heads)
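Since updatephases now takes a transaction getter rather than a transaction, callers can defer opening a transaction until a phase boundary actually moves. A hedged caller sketch (the transaction name 'phase-sync' is illustrative, and 'repo' and 'headsbyphase' are assumed to exist as above):

state = {'tr': None}

def trgetter():
    if state['tr'] is None:
        # only opened the first time a boundary really advances
        state['tr'] = repo.transaction('phase-sync')
    return state['tr']

updatephases(repo, trgetter, headsbyphase)
if state['tr'] is not None:
    state['tr'].close()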
def analyzeremotephases(repo, subset, roots):
"""Compute phases heads and root in a subset of node from root dict
--- a/mercurial/policy.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/policy.py Sat Sep 30 07:52:48 2017 -0700
@@ -75,7 +75,16 @@
(r'cext', r'diffhelpers'): 1,
(r'cext', r'mpatch'): 1,
(r'cext', r'osutil'): 1,
- (r'cext', r'parsers'): 1,
+ (r'cext', r'parsers'): 3,
+}
+
+# map import request to other package or module
+_modredirects = {
+ (r'cext', r'charencode'): (r'cext', r'parsers'),
+ (r'cffi', r'base85'): (r'pure', r'base85'),
+ (r'cffi', r'charencode'): (r'pure', r'charencode'),
+ (r'cffi', r'diffhelpers'): (r'pure', r'diffhelpers'),
+ (r'cffi', r'parsers'): (r'pure', r'parsers'),
}
def _checkmod(pkgname, modname, mod):
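The redirect table above simply rewrites (package, module) import requests before the module is imported; an illustrative lookup:

# a cffi request for 'parsers' is served by the pure-Python module
pn, mn = _modredirects.get((r'cffi', r'parsers'), (r'cffi', r'parsers'))
assert (pn, mn) == (r'pure', r'parsers')
# anything not in the table falls through unchanged
pn, mn = _modredirects.get((r'cext', r'osutil'), (r'cext', r'osutil'))
assert (pn, mn) == (r'cext', r'osutil')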
@@ -94,11 +103,14 @@
raise ImportError(r'invalid HGMODULEPOLICY %r' % policy)
assert verpkg or purepkg
if verpkg:
+ pn, mn = _modredirects.get((verpkg, modname), (verpkg, modname))
try:
- mod = _importfrom(verpkg, modname)
- _checkmod(verpkg, modname, mod)
+ mod = _importfrom(pn, mn)
+ if pn == verpkg:
+ _checkmod(pn, mn, mod)
return mod
except ImportError:
if not purepkg:
raise
- return _importfrom(purepkg, modname)
+ pn, mn = _modredirects.get((purepkg, modname), (purepkg, modname))
+ return _importfrom(pn, mn)
--- a/mercurial/posix.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/posix.py Sat Sep 30 07:52:48 2017 -0700
@@ -52,14 +52,14 @@
'''Same as posixpath.split, but faster
>>> import posixpath
- >>> for f in ['/absolute/path/to/file',
- ... 'relative/path/to/file',
- ... 'file_alone',
- ... 'path/to/directory/',
- ... '/multiple/path//separators',
- ... '/file_at_root',
- ... '///multiple_leading_separators_at_root',
- ... '']:
+ >>> for f in [b'/absolute/path/to/file',
+ ... b'relative/path/to/file',
+ ... b'file_alone',
+ ... b'path/to/directory/',
+ ... b'/multiple/path//separators',
+ ... b'/file_at_root',
+ ... b'///multiple_leading_separators_at_root',
+ ... b'']:
... assert split(f) == posixpath.split(f), f
'''
ht = p.rsplit('/', 1)
@@ -342,13 +342,13 @@
- lowercase
- omit ignored characters [200c-200f, 202a-202e, 206a-206f,feff]
- >>> normcase('UPPER')
+ >>> normcase(b'UPPER')
'upper'
- >>> normcase('Caf\xc3\xa9')
+ >>> normcase(b'Caf\\xc3\\xa9')
'cafe\\xcc\\x81'
- >>> normcase('\xc3\x89')
+ >>> normcase(b'\\xc3\\x89')
'e\\xcc\\x81'
- >>> normcase('\xb8\xca\xc3\xca\xbe\xc8.JPG') # issue3918
+ >>> normcase(b'\\xb8\\xca\\xc3\\xca\\xbe\\xc8.JPG') # issue3918
'%b8%ca%c3\\xca\\xbe%c8.jpg'
'''
@@ -372,14 +372,14 @@
c = encoding.getutf8char(path, pos)
pos += len(c)
except ValueError:
- c = '%%%02X' % ord(path[pos])
+ c = '%%%02X' % ord(path[pos:pos + 1])
pos += 1
s += c
u = s.decode('utf-8')
# Decompose then lowercase (HFS+ technote specifies lower)
- enc = unicodedata.normalize('NFD', u).lower().encode('utf-8')
+ enc = unicodedata.normalize(r'NFD', u).lower().encode('utf-8')
# drop HFS+ ignored characters
return encoding.hfsignoreclean(enc)
--- a/mercurial/progress.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/progress.py Sat Sep 30 07:52:48 2017 -0700
@@ -104,6 +104,8 @@
self.order = self.ui.configlist(
'progress', 'format',
default=['topic', 'bar', 'number', 'estimate'])
+ self.estimateinterval = self.ui.configwith(
+ float, 'progress', 'estimateinterval')
def show(self, now, topic, pos, item, unit, total):
if not shouldprint(self.ui):
@@ -215,19 +217,15 @@
delta = pos - initialpos
if delta > 0:
elapsed = now - self.starttimes[topic]
- # experimental config: progress.estimate
- if elapsed > float(
- self.ui.config('progress', 'estimate')):
- seconds = (elapsed * (target - delta)) // delta + 1
- return fmtremaining(seconds)
+ seconds = (elapsed * (target - delta)) // delta + 1
+ return fmtremaining(seconds)
return ''
def speed(self, topic, pos, unit, now):
initialpos = self.startvals[topic]
delta = pos - initialpos
elapsed = now - self.starttimes[topic]
- if elapsed > float(
- self.ui.config('progress', 'estimate')):
+ if elapsed > 0:
return _('%d %s/sec') % (delta / elapsed, unit)
return ''
@@ -242,6 +240,32 @@
else:
return False
+ def _calibrateestimate(self, topic, now, pos):
+ '''Adjust starttimes and startvals for topic so ETA works better
+
+ If progress is non-linear (ex. get much slower in the last minute),
+ it's more friendly to only use a recent time span for ETA and speed
+ calculation.
+
+ [======================================> ]
+ ^^^^^^^
+ estimateinterval, only use this for estimation
+ '''
+ interval = self.estimateinterval
+ if interval <= 0:
+ return
+ elapsed = now - self.starttimes[topic]
+ if elapsed > interval:
+ delta = pos - self.startvals[topic]
+ newdelta = delta * interval / elapsed
+ # If a stall happens temporarily, the ETA could change dramatically and
+ # frequently. This check avoids such dramatic changes and makes the ETA
+ # smoother.
+ if newdelta < 0.1:
+ return
+ self.startvals[topic] = pos - newdelta
+ self.starttimes[topic] = now - interval
+
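A small, self-contained numeric sketch of the recalibration above (all numbers are made up): with a 60 second estimateinterval, only the most recent minute of progress feeds the ETA.

interval = 60.0                       # estimateinterval
now, pos = 1000.0, 9000
starttimes = {'clone': 700.0}         # topic started 300s ago
startvals = {'clone': 0}

elapsed = now - starttimes['clone']   # 300.0
if elapsed > interval:
    delta = pos - startvals['clone']          # 9000 items over 300s
    newdelta = delta * interval / elapsed     # 1800 items attributed to the last 60s
    if newdelta >= 0.1:
        startvals['clone'] = pos - newdelta   # 7200.0
        starttimes['clone'] = now - interval  # 940.0
# subsequent ETA/speed math now sees 1800 items in 60s instead of 9000 in 300s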
def progress(self, topic, pos, item='', unit='', total=None):
now = time.time()
self._refreshlock.acquire()
@@ -272,6 +296,7 @@
self.topics.append(topic)
self.topicstates[topic] = pos, item, unit, total
self.curtopic = topic
+ self._calibrateestimate(topic, now, pos)
if now - self.lastprint >= self.refresh and self.topics:
if self._oktoprint(now):
self.lastprint = now
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/pure/charencode.py Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,85 @@
+# charencode.py - miscellaneous character encoding
+#
+# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+import array
+
+from .. import (
+ pycompat,
+)
+
+def isasciistr(s):
+ try:
+ s.decode('ascii')
+ return True
+ except UnicodeDecodeError:
+ return False
+
+def asciilower(s):
+ '''convert a string to lowercase if ASCII
+
+ Raises UnicodeDecodeError if non-ASCII characters are found.'''
+ s.decode('ascii')
+ return s.lower()
+
+def asciiupper(s):
+ '''convert a string to uppercase if ASCII
+
+ Raises UnicodeDecodeError if non-ASCII characters are found.'''
+ s.decode('ascii')
+ return s.upper()
+
+_jsonmap = []
+_jsonmap.extend("\\u%04x" % x for x in range(32))
+_jsonmap.extend(pycompat.bytechr(x) for x in range(32, 127))
+_jsonmap.append('\\u007f')
+_jsonmap[0x09] = '\\t'
+_jsonmap[0x0a] = '\\n'
+_jsonmap[0x22] = '\\"'
+_jsonmap[0x5c] = '\\\\'
+_jsonmap[0x08] = '\\b'
+_jsonmap[0x0c] = '\\f'
+_jsonmap[0x0d] = '\\r'
+_paranoidjsonmap = _jsonmap[:]
+_paranoidjsonmap[0x3c] = '\\u003c' # '<' (e.g. escape "</script>")
+_paranoidjsonmap[0x3e] = '\\u003e' # '>'
+_jsonmap.extend(pycompat.bytechr(x) for x in range(128, 256))
+
+def jsonescapeu8fast(u8chars, paranoid):
+ """Convert a UTF-8 byte string to JSON-escaped form (fast path)
+
+ Raises ValueError if non-ASCII characters have to be escaped.
+ """
+ if paranoid:
+ jm = _paranoidjsonmap
+ else:
+ jm = _jsonmap
+ try:
+ return ''.join(jm[x] for x in bytearray(u8chars))
+ except IndexError:
+ raise ValueError
+
+if pycompat.ispy3:
+ _utf8strict = r'surrogatepass'
+else:
+ _utf8strict = r'strict'
+
+def jsonescapeu8fallback(u8chars, paranoid):
+ """Convert a UTF-8 byte string to JSON-escaped form (slow path)
+
+ Escapes all non-ASCII characters regardless of whether paranoid is False.
+ """
+ if paranoid:
+ jm = _paranoidjsonmap
+ else:
+ jm = _jsonmap
+ # non-BMP char is represented as UTF-16 surrogate pair
+ u16b = u8chars.decode('utf-8', _utf8strict).encode('utf-16', _utf8strict)
+ u16codes = array.array(r'H', u16b)
+ u16codes.pop(0) # drop BOM
+ return ''.join(jm[x] if x < 128 else '\\u%04x' % x for x in u16codes)
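Illustrative use of the two helpers above, under the byte-string semantics this pure module assumes: the fast path handles ASCII (escaping '<' and '>' only in paranoid mode) and raises ValueError on non-ASCII input, at which point callers would switch to the fallback.

jsonescapeu8fast(b'<foo>\n', paranoid=True)    # -> '\\u003cfoo\\u003e\\n'
jsonescapeu8fast(b'<foo>\n', paranoid=False)   # -> '<foo>\\n'
try:
    jsonescapeu8fast(b'caf\xc3\xa9', paranoid=True)
except ValueError:
    # non-ASCII input: callers drop to the slow path
    jsonescapeu8fallback(b'caf\xc3\xa9', paranoid=True)   # -> 'caf\\u00e9'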
--- a/mercurial/pure/parsers.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/pure/parsers.py Sat Sep 30 07:52:48 2017 -0700
@@ -80,7 +80,7 @@
return i * indexsize
def __delitem__(self, i):
- if not isinstance(i, slice) or not i.stop == -1 or not i.step is None:
+ if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
raise ValueError("deleting slices only supports a:-1 with step 1")
i = self._fix_index(i.start)
if i < self._lgt:
@@ -114,7 +114,7 @@
return count
def __delitem__(self, i):
- if not isinstance(i, slice) or not i.stop == -1 or not i.step is None:
+ if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
raise ValueError("deleting slices only supports a:-1 with step 1")
i = self._fix_index(i.start)
if i < self._lgt:
--- a/mercurial/pycompat.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/pycompat.py Sat Sep 30 07:52:48 2017 -0700
@@ -63,6 +63,7 @@
sysexecutable = os.fsencode(sysexecutable)
stringio = io.BytesIO
maplist = lambda *args: list(map(*args))
+ rawinput = input
# TODO: .buffer might not exist if std streams were replaced; we'll need
# a silly wrapper to make a bytes stream backed by a unicode one.
@@ -312,6 +313,7 @@
shlexsplit = shlex.split
stringio = cStringIO.StringIO
maplist = map
+ rawinput = raw_input
class _pycompatstub(object):
def __init__(self):
--- a/mercurial/registrar.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/registrar.py Sat Sep 30 07:52:48 2017 -0700
@@ -166,6 +166,16 @@
Optional argument 'takeorder' indicates whether a predicate function
takes ordering policy as the last argument.
+ Optional argument 'weight' indicates the estimated run-time cost, useful
+ for static optimization; the default is 1. A higher weight means more
+ expensive. Usually, revsets that are fast and return only one revision have
+ a weight of 0.5 (ex. a symbol); revsets with O(changelog) complexity that
+ read only the changelog have weight 10 (ex. author); revsets reading
+ manifest deltas have weight 30 (ex. adds); revsets reading manifest contents
+ have weight 100 (ex. contains). Note: these values are flexible. If a revset
+ has the same big-O time complexity as 'contains', but with a smaller
+ constant, it might have a weight of 90.
+
'revsetpredicate' instance in example above can be used to
decorate multiple functions.
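A hedged sketch of how the new 'weight' keyword would be passed at registration time; the predicate names and bodies below are made up for illustration.

from mercurial import registrar

revsetpredicate = registrar.revsetpredicate()

@revsetpredicate('cheap()', weight=0.5)
def cheap(repo, subset, x):
    """a fast predicate, comparable to a symbol lookup"""
    return subset

@revsetpredicate('scansmanifests()', weight=100)
def scansmanifests(repo, subset, x):
    """an expensive predicate that has to read manifest contents"""
    return subset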
@@ -178,9 +188,10 @@
_getname = _funcregistrarbase._parsefuncdecl
_docformat = "``%s``\n %s"
- def _extrasetup(self, name, func, safe=False, takeorder=False):
+ def _extrasetup(self, name, func, safe=False, takeorder=False, weight=1):
func._safe = safe
func._takeorder = takeorder
+ func._weight = weight
class filesetpredicate(_funcregistrarbase):
"""Decorator to register fileset predicate
@@ -308,3 +319,64 @@
def _extrasetup(self, name, func, argspec=None):
func._argspec = argspec
+
+class internalmerge(_funcregistrarbase):
+ """Decorator to register in-process merge tool
+
+ Usage::
+
+ internalmerge = registrar.internalmerge()
+
+ @internalmerge('mymerge', internalmerge.mergeonly,
+ onfailure=None, precheck=None):
+ def mymergefunc(repo, mynode, orig, fcd, fco, fca,
+ toolconf, files, labels=None):
+ '''Explanation of this internal merge tool ....
+ '''
+ return 1, False # means "conflicted", "no deletion needed"
+
+ The first string argument is used to compose actual merge tool name,
+ ":name" and "internal:name" (the latter is historical one).
+
+ The second argument is one of merge types below:
+
+ ========== ======== ======== =========
+ merge type precheck premerge fullmerge
+ ========== ======== ======== =========
+ nomerge x x x
+ mergeonly o x o
+ fullmerge o o o
+ ========== ======== ======== =========
+
+ Optional argument 'onfailure' is the format of the warning message
+ to be used on failure of merging (the target filename is specified
+ when formatting). Pass None if the warning message should be
+ suppressed.
+
+ Optional argument 'precheck' is the function to be run
+ before the actual invocation of the internal merge tool itself.
+ It takes the same arguments as the internal merge tool does, other than
+ 'files' and 'labels'. If it returns a false value, merging is aborted
+ immediately (and the file is marked as "unresolved").
+
+ 'internalmerge' instance in example above can be used to
+ decorate multiple functions.
+
+ Decorated functions are registered automatically at loading
+ extension, if an instance named as 'internalmerge' is used for
+ decorating in extension.
+
+ Otherwise, explicit 'filemerge.loadinternalmerge()' is needed.
+ """
+ _docformat = "``:%s``\n %s"
+
+ # merge type definitions:
+ nomerge = None
+ mergeonly = 'mergeonly' # just the full merge, no premerge
+ fullmerge = 'fullmerge' # both premerge and merge
+
+ def _extrasetup(self, name, func, mergetype,
+ onfailure=None, precheck=None):
+ func.mergetype = mergetype
+ func.onfailure = onfailure
+ func.precheck = precheck
--- a/mercurial/repair.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/repair.py Sat Sep 30 07:52:48 2017 -0700
@@ -12,7 +12,10 @@
import hashlib
from .i18n import _
-from .node import short
+from .node import (
+ hex,
+ short,
+)
from . import (
bundle2,
changegroup,
@@ -35,8 +38,9 @@
# Include a hash of all the nodes in the filename for uniqueness
allcommits = repo.set('%ln::%ln', bases, heads)
allhashes = sorted(c.hex() for c in allcommits)
- totalhash = hashlib.sha1(''.join(allhashes)).hexdigest()
- name = "%s/%s-%s-%s.hg" % (backupdir, short(node), totalhash[:8], suffix)
+ totalhash = hashlib.sha1(''.join(allhashes)).digest()
+ name = "%s/%s-%s-%s.hg" % (backupdir, short(node),
+ hex(totalhash[:4]), suffix)
cgversion = changegroup.localversion(repo)
comp = None
@@ -67,16 +71,20 @@
return sorted(files)
+def _collectrevlog(revlog, striprev):
+ _, brokenset = revlog.getstrippoint(striprev)
+ return [revlog.linkrev(r) for r in brokenset]
+
+def _collectmanifest(repo, striprev):
+ return _collectrevlog(repo.manifestlog._revlog, striprev)
+
def _collectbrokencsets(repo, files, striprev):
"""return the changesets which will be broken by the truncation"""
s = set()
- def collectone(revlog):
- _, brokenset = revlog.getstrippoint(striprev)
- s.update([revlog.linkrev(r) for r in brokenset])
- collectone(repo.manifestlog._revlog)
+ s.update(_collectmanifest(repo, striprev))
for fname in files:
- collectone(repo.file(fname))
+ s.update(_collectrevlog(repo.file(fname), striprev))
return s
@@ -174,16 +182,13 @@
tmpbundlefile = _bundle(repo, savebases, saveheads, node, 'temp',
compress=False, obsolescence=False)
- mfst = repo.manifestlog._revlog
-
try:
with repo.transaction("strip") as tr:
offset = len(tr.entries)
tr.startgroup()
cl.strip(striprev, tr)
- mfst.strip(striprev, tr)
- striptrees(repo, tr, striprev, files)
+ stripmanifest(repo, striprev, tr, files)
for fn in files:
repo.file(fn).strip(striprev, tr)
@@ -310,6 +315,11 @@
callback.topic = topic
callback.addnodes(nodelist)
+def stripmanifest(repo, striprev, tr, files):
+ revlog = repo.manifestlog._revlog
+ revlog.strip(striprev, tr)
+ striptrees(repo, tr, striprev, files)
+
def striptrees(repo, tr, striprev, files):
if 'treemanifest' in repo.requirements: # safe but unnecessary
# otherwise
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/repository.py Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,268 @@
+# repository.py - Interfaces and base classes for repositories and peers.
+#
+# Copyright 2017 Gregory Szorc <gregory.szorc@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+import abc
+
+from .i18n import _
+from . import (
+ error,
+)
+
+class _basepeer(object):
+ """Represents a "connection" to a repository.
+
+ This is the base interface for representing a connection to a repository.
+ It holds basic properties and methods applicable to all peer types.
+
+ This is not a complete interface definition and should not be used
+ outside of this module.
+ """
+ __metaclass__ = abc.ABCMeta
+
+ @abc.abstractproperty
+ def ui(self):
+ """ui.ui instance."""
+
+ @abc.abstractmethod
+ def url(self):
+ """Returns a URL string representing this peer.
+
+ Currently, implementations expose the raw URL used to construct the
+ instance. It may contain credentials as part of the URL. The
+ expectations of the value aren't well-defined and this could lead to
+ data leakage.
+
+ TODO audit/clean consumers and more clearly define the contents of this
+ value.
+ """
+
+ @abc.abstractmethod
+ def local(self):
+ """Returns a local repository instance.
+
+ If the peer represents a local repository, returns an object that
+ can be used to interface with it. Otherwise returns ``None``.
+ """
+
+ @abc.abstractmethod
+ def peer(self):
+ """Returns an object conforming to this interface.
+
+ Most implementations will ``return self``.
+ """
+
+ @abc.abstractmethod
+ def canpush(self):
+ """Returns a boolean indicating if this peer can be pushed to."""
+
+ @abc.abstractmethod
+ def close(self):
+ """Close the connection to this peer.
+
+ This is called when the peer will no longer be used. Resources
+ associated with the peer should be cleaned up.
+ """
+
+class _basewirecommands(object):
+ """Client-side interface for communicating over the wire protocol.
+
+ This interface is used as a gateway to the Mercurial wire protocol.
+ Methods commonly call wire protocol commands of the same name.
+ """
+ __metaclass__ = abc.ABCMeta
+
+ @abc.abstractmethod
+ def branchmap(self):
+ """Obtain heads in named branches.
+
+ Returns a dict mapping branch name to an iterable of nodes that are
+ heads on that branch.
+ """
+
+ @abc.abstractmethod
+ def capabilities(self):
+ """Obtain capabilities of the peer.
+
+ Returns a set of string capabilities.
+ """
+
+ @abc.abstractmethod
+ def debugwireargs(self, one, two, three=None, four=None, five=None):
+ """Used to facilitate debugging of arguments passed over the wire."""
+
+ @abc.abstractmethod
+ def getbundle(self, source, **kwargs):
+ """Obtain remote repository data as a bundle.
+
+ This command is how the bulk of repository data is transferred from
+ the peer to the local repository.
+
+ Returns a generator of bundle data.
+ """
+
+ @abc.abstractmethod
+ def heads(self):
+ """Determine all known head revisions in the peer.
+
+ Returns an iterable of binary nodes.
+ """
+
+ @abc.abstractmethod
+ def known(self, nodes):
+ """Determine whether multiple nodes are known.
+
+ Accepts an iterable of nodes whose presence to check for.
+
+ Returns an iterable of booleans indicating whether the corresponding node
+ at that index is known to the peer.
+ """
+
+ @abc.abstractmethod
+ def listkeys(self, namespace):
+ """Obtain all keys in a pushkey namespace.
+
+ Returns an iterable of key names.
+ """
+
+ @abc.abstractmethod
+ def lookup(self, key):
+ """Resolve a value to a known revision.
+
+ Returns a binary node of the resolved revision on success.
+ """
+
+ @abc.abstractmethod
+ def pushkey(self, namespace, key, old, new):
+ """Set a value using the ``pushkey`` protocol.
+
+ Arguments correspond to the pushkey namespace and key to operate on and
+ the old and new values for that key.
+
+ Returns a string with the peer result. The value inside varies by the
+ namespace.
+ """
+
+ @abc.abstractmethod
+ def stream_out(self):
+ """Obtain streaming clone data.
+
+ A successful result should be a generator of data chunks.
+ """
+
+ @abc.abstractmethod
+ def unbundle(self, bundle, heads, url):
+ """Transfer repository data to the peer.
+
+ This is how the bulk of data during a push is transferred.
+
+ Returns the integer number of heads added to the peer.
+ """
+
+class _baselegacywirecommands(object):
+ """Interface for implementing support for legacy wire protocol commands.
+
+ Wire protocol commands transition to legacy status when they are no longer
+ used by modern clients. To facilitate identifying which commands are
+ legacy, the interfaces are split.
+ """
+ __metaclass__ = abc.ABCMeta
+
+ @abc.abstractmethod
+ def between(self, pairs):
+ """Obtain nodes between pairs of nodes.
+
+ ``pairs`` is an iterable of node pairs.
+
+ Returns an iterable of iterables of nodes corresponding to each
+ requested pair.
+ """
+
+ @abc.abstractmethod
+ def branches(self, nodes):
+ """Obtain ancestor changesets of specific nodes back to a branch point.
+
+ For each requested node, the peer finds the first ancestor node that is
+ a DAG root or is a merge.
+
+ Returns an iterable of iterables with the resolved values for each node.
+ """
+
+ @abc.abstractmethod
+ def changegroup(self, nodes, kind):
+ """Obtain a changegroup with data for descendants of specified nodes."""
+
+ @abc.abstractmethod
+ def changegroupsubset(self, bases, heads, kind):
+ pass
+
+class peer(_basepeer, _basewirecommands):
+ """Unified interface and base class for peer repositories.
+
+ All peer instances must inherit from this class and conform to its
+ interface.
+ """
+
+ @abc.abstractmethod
+ def iterbatch(self):
+ """Obtain an object to be used for multiple method calls.
+
+ Various operations call several methods on peer instances. If each
+ method call were performed immediately and serially, this would
+ require round trips to remote peers and/or would slow down execution.
+
+ Some peers have the ability to "batch" method calls to avoid costly
+ round trips or to facilitate concurrent execution.
+
+ This method returns an object that can be used to indicate intent to
+ perform batched method calls.
+
+ The returned object is a proxy of this peer. It intercepts calls to
+ batchable methods and queues them instead of performing them
+ immediately. This proxy object has a ``submit`` method that will
+ perform all queued batchable method calls. A ``results()`` method
+ exposes the results of queued/batched method calls. It is a generator
+ of results in the order they were called.
+
+ Not all peers or wire protocol implementations will actually batch method
+ calls. However, they must all support this API.
+ """
+
+ def capable(self, name):
+ """Determine support for a named capability.
+
+ Returns ``False`` if capability not supported.
+
+ Returns ``True`` if boolean capability is supported. Returns a string
+ if capability support is non-boolean.
+ """
+ caps = self.capabilities()
+ if name in caps:
+ return True
+
+ name = '%s=' % name
+ for cap in caps:
+ if cap.startswith(name):
+ return cap[len(name):]
+
+ return False
+
+ def requirecap(self, name, purpose):
+ """Require a capability to be present.
+
+ Raises a ``CapabilityError`` if the capability isn't present.
+ """
+ if self.capable(name):
+ return
+
+ raise error.CapabilityError(
+ _('cannot %s; remote repository does not support the %r '
+ 'capability') % (purpose, name))
+
+class legacypeer(peer, _baselegacywirecommands):
+ """peer but with support for legacy wire protocol commands."""
--- a/mercurial/revlog.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/revlog.py Sat Sep 30 07:52:48 2017 -0700
@@ -268,8 +268,13 @@
If checkambig, indexfile is opened with checkambig=True at
writing, to avoid file stat ambiguity.
+
+ If mmaplargeindex is True, and an mmapindexthreshold is set, the
+ index will be mmapped rather than read if it is larger than the
+ configured threshold.
"""
- def __init__(self, opener, indexfile, datafile=None, checkambig=False):
+ def __init__(self, opener, indexfile, datafile=None, checkambig=False,
+ mmaplargeindex=False):
"""
create a revlog object
@@ -301,6 +306,7 @@
self._compengine = 'zlib'
self._maxdeltachainspan = -1
+ mmapindexthreshold = None
v = REVLOG_DEFAULT_VERSION
opts = getattr(opener, 'options', None)
if opts is not None:
@@ -323,6 +329,8 @@
self._compengine = opts['compengine']
if 'maxdeltachainspan' in opts:
self._maxdeltachainspan = opts['maxdeltachainspan']
+ if mmaplargeindex and 'mmapindexthreshold' in opts:
+ mmapindexthreshold = opts['mmapindexthreshold']
if self._chunkcachesize <= 0:
raise RevlogError(_('revlog chunk cache size %r is not greater '
@@ -335,7 +343,11 @@
self._initempty = True
try:
f = self.opener(self.indexfile)
- indexdata = f.read()
+ if (mmapindexthreshold is not None and
+ self.opener.fstat(f).st_size >= mmapindexthreshold):
+ indexdata = util.buffer(util.mmapread(f))
+ else:
+ indexdata = f.read()
f.close()
if len(indexdata) > 0:
v = versionformat_unpack(indexdata[:4])[0]
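
The hunk above makes the index either mmapped or read whole, depending on its size. A minimal sketch of that decision outside Mercurial, using the stdlib `mmap` module in place of `util.mmapread`/`util.buffer`:

import mmap
import os
import tempfile

def readindex(path, threshold):
    """Read an index file: mmap it when it is at least `threshold` bytes,
    otherwise slurp it into memory as before."""
    with open(path, 'rb') as f:
        if threshold is not None and os.fstat(f.fileno()).st_size >= threshold:
            return memoryview(mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ))
        return f.read()

with tempfile.NamedTemporaryFile(delete=False) as tmp:
    tmp.write(b'\x00' * 2048)
print(len(readindex(tmp.name, threshold=1024)))   # 2048, served via mmap
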
@@ -1128,6 +1140,44 @@
raise LookupError(id, self.indexfile, _('no match found'))
+ def shortest(self, hexnode, minlength=1):
+ """Find the shortest unambiguous prefix that matches hexnode."""
+ def isvalid(test):
+ try:
+ if self._partialmatch(test) is None:
+ return False
+
+ try:
+ i = int(test)
+ # a pure int prefix cannot be mistaken for a rev number if it
+ # starts with a zero or, obviously, if the int is larger than
+ # the tip rev
+ if test[0] == '0' or i > len(self):
+ return True
+ return False
+ except ValueError:
+ return True
+ except error.RevlogError:
+ return False
+ except error.WdirUnsupported:
+ # single 'ff...' match
+ return True
+
+ shortest = hexnode
+ startlength = max(6, minlength)
+ length = startlength
+ while True:
+ test = hexnode[:length]
+ if isvalid(test):
+ shortest = test
+ if length == minlength or length > startlength:
+ return shortest
+ length -= 1
+ else:
+ length += 1
+ if len(shortest) <= length:
+ return shortest
+
def cmp(self, node, text):
"""compare text with a given file revision
@@ -1473,7 +1523,7 @@
if revornode is None:
revornode = templatefilters.short(hex(node))
raise RevlogError(_("integrity check failed on %s:%s")
- % (self.indexfile, revornode))
+ % (self.indexfile, pycompat.bytestr(revornode)))
def checkinlinesize(self, tr, fp=None):
"""Check if the revlog is too big for inline and convert if so.
@@ -1694,6 +1744,13 @@
- rawtext is optional (can be None); if not set, cachedelta must be set.
if both are set, they must correspond to each other.
"""
+ if node == nullid:
+ raise RevlogError(_("%s: attempt to add null revision") %
+ (self.indexfile))
+ if node == wdirid:
+ raise RevlogError(_("%s: attempt to add wdir revision") %
+ (self.indexfile))
+
btext = [rawtext]
def buildtext():
if btext[0] is not None:
@@ -1865,7 +1922,7 @@
ifh.write(data[1])
self.checkinlinesize(transaction, ifh)
- def addgroup(self, cg, linkmapper, transaction, addrevisioncb=None):
+ def addgroup(self, deltas, linkmapper, transaction, addrevisioncb=None):
"""
add a delta group
@@ -1898,22 +1955,15 @@
ifh.flush()
try:
# loop through our set of deltas
- chain = None
- for chunkdata in iter(lambda: cg.deltachunk(chain), {}):
- node = chunkdata['node']
- p1 = chunkdata['p1']
- p2 = chunkdata['p2']
- cs = chunkdata['cs']
- deltabase = chunkdata['deltabase']
- delta = chunkdata['delta']
- flags = chunkdata['flags'] or REVIDX_DEFAULT_FLAGS
+ for data in deltas:
+ node, p1, p2, linknode, deltabase, delta, flags = data
+ link = linkmapper(linknode)
+ flags = flags or REVIDX_DEFAULT_FLAGS
nodes.append(node)
- link = linkmapper(cs)
if node in self.nodemap:
# this can happen if two branches make the same change
- chain = node
continue
for p in (p1, p2):
@@ -1947,13 +1997,13 @@
# We're only using addgroup() in the context of changegroup
# generation so the revision data can always be handled as raw
# by the flagprocessor.
- chain = self._addrevision(node, None, transaction, link,
- p1, p2, flags, (baserev, delta),
- ifh, dfh,
- alwayscache=bool(addrevisioncb))
+ self._addrevision(node, None, transaction, link,
+ p1, p2, flags, (baserev, delta),
+ ifh, dfh,
+ alwayscache=bool(addrevisioncb))
if addrevisioncb:
- addrevisioncb(self, chain)
+ addrevisioncb(self, node)
if not dfh and not self._inline:
# addrevision switched from inline to conventional
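
`addgroup()` now consumes a plain iterable of 7-tuples and resolves the link revision itself via `linkmapper(linknode)`, instead of pulling `deltachunk()` dicts off the changegroup object. A hedged sketch of a caller-side adaptor; the field names mirror the old chunkdata keys and are illustrative, not the actual changegroup API:

def deltaiter(chunks):
    """Adapt old-style chunk dicts into the 7-tuples addgroup() now expects."""
    for chunk in chunks:
        yield (chunk['node'], chunk['p1'], chunk['p2'], chunk['cs'],
               chunk['deltabase'], chunk['delta'], chunk.get('flags', 0))

sample = [{'node': b'\x01' * 20, 'p1': b'\x00' * 20, 'p2': b'\x00' * 20,
           'cs': b'\x01' * 20, 'deltabase': b'\x00' * 20, 'delta': b'...'}]
for node, p1, p2, linknode, deltabase, delta, flags in deltaiter(sample):
    print(len(node), flags)   # 20 0
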
--- a/mercurial/revset.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/revset.py Sat Sep 30 07:52:48 2017 -0700
@@ -40,22 +40,59 @@
getargs = revsetlang.getargs
getargsdict = revsetlang.getargsdict
-# constants used as an argument of match() and matchany()
-anyorder = revsetlang.anyorder
-defineorder = revsetlang.defineorder
-followorder = revsetlang.followorder
-
baseset = smartset.baseset
generatorset = smartset.generatorset
spanset = smartset.spanset
fullreposet = smartset.fullreposet
+# Constants for ordering requirement, used in getset():
+#
+# If 'define', any nested functions and operations MAY change the ordering of
+# the entries in the set (but if it changes the ordering, it MUST ALWAYS change
+# it). If 'follow', any nested functions and operations MUST take the ordering
+# specified by the first operand to the '&' operator.
+#
+# For instance,
+#
+# X & (Y | Z)
+# ^ ^^^^^^^
+# | follow
+# define
+#
+# will be evaluated as 'or(y(x()), z(x()))', where 'x()' can change the order
+# of the entries in the set, but 'y()', 'z()' and 'or()' shouldn't.
+#
+# 'any' means the order doesn't matter. For instance,
+#
+# (X & !Y) | ancestors(Z)
+# ^ ^
+# any any
+#
+# For 'X & !Y', 'X' decides the order and 'Y' is subtracted from 'X', so the
+# order of 'Y' does not matter. For 'ancestors(Z)', Z's order does not matter
+# since 'ancestors' does not care about the order of its argument.
+#
+# Currently, most revsets do not care about the order, so 'define' is
+# equivalent to 'follow' for them, and the resulting order is based on the
+# 'subset' parameter passed down to them:
+#
+# m = revset.match(...)
+# m(repo, subset, order=defineorder)
+# ^^^^^^
+# For most revsets, 'define' means using the order this subset provides
+#
+# There are a few revsets that always redefine the order if 'define' is
+# specified: 'sort(X)', 'reverse(X)', 'x:y'.
+anyorder = 'any' # doesn't care about the order; could even be randomly shuffled
+defineorder = 'define' # ALWAYS redefine, or ALWAYS follow the current order
+followorder = 'follow' # MUST follow the current order
+
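
A toy model of the 'define' vs. 'follow' contract described above (pure Python, nothing from Mercurial): under 'follow' the right-hand operand must come back in the subset's order, under 'define' it may impose its own.

anyorder, defineorder, followorder = 'any', 'define', 'follow'

def evalset(exprrevs, subset, order):
    """Toy: a revset may reorder its result under 'define', but must keep
    the subset's order under 'follow'."""
    members = set(subset)
    revs = [r for r in exprrevs if r in members]
    if order == followorder:
        pos = {r: i for i, r in enumerate(subset)}
        revs.sort(key=pos.__getitem__)
    return revs

subset = [5, 3, 8, 1]                            # order chosen by X in 'X & Y'
print(evalset([1, 3, 9], subset, followorder))   # [3, 1]  -- follows subset
print(evalset([1, 3, 9], subset, defineorder))   # [1, 3]  -- may define its own
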
# helpers
-def getset(repo, subset, x):
+def getset(repo, subset, x, order=defineorder):
if not x:
raise error.ParseError(_("missing argument"))
- return methods[x[0]](repo, subset, *x[1:])
+ return methods[x[0]](repo, subset, *x[1:], order=order)
def _getrevsource(repo, r):
extra = repo[r].extra()
@@ -69,7 +106,7 @@
# operator methods
-def stringset(repo, subset, x):
+def stringset(repo, subset, x, order):
x = scmutil.intrev(repo[x])
if (x in subset
or x == node.nullrev and isinstance(subset, fullreposet)):
@@ -126,30 +163,42 @@
return subset & xs
def andset(repo, subset, x, y, order):
- return getset(repo, getset(repo, subset, x), y)
+ if order == anyorder:
+ yorder = anyorder
+ else:
+ yorder = followorder
+ return getset(repo, getset(repo, subset, x, order), y, yorder)
+
+def andsmallyset(repo, subset, x, y, order):
+ # 'andsmally(x, y)' is equivalent to 'and(x, y)', but faster when y is small
+ if order == anyorder:
+ yorder = anyorder
+ else:
+ yorder = followorder
+ return getset(repo, getset(repo, subset, y, yorder), x, order)
def differenceset(repo, subset, x, y, order):
- return getset(repo, subset, x) - getset(repo, subset, y)
+ return getset(repo, subset, x, order) - getset(repo, subset, y, anyorder)
-def _orsetlist(repo, subset, xs):
+def _orsetlist(repo, subset, xs, order):
assert xs
if len(xs) == 1:
- return getset(repo, subset, xs[0])
+ return getset(repo, subset, xs[0], order)
p = len(xs) // 2
- a = _orsetlist(repo, subset, xs[:p])
- b = _orsetlist(repo, subset, xs[p:])
+ a = _orsetlist(repo, subset, xs[:p], order)
+ b = _orsetlist(repo, subset, xs[p:], order)
return a + b
def orset(repo, subset, x, order):
xs = getlist(x)
if order == followorder:
# slow path to take the subset order
- return subset & _orsetlist(repo, fullreposet(repo), xs)
+ return subset & _orsetlist(repo, fullreposet(repo), xs, anyorder)
else:
- return _orsetlist(repo, subset, xs)
+ return _orsetlist(repo, subset, xs, order)
def notset(repo, subset, x, order):
- return subset - getset(repo, subset, x)
+ return subset - getset(repo, subset, x, anyorder)
def relationset(repo, subset, x, y, order):
raise error.ParseError(_("can't use a relation in this context"))
@@ -176,11 +225,11 @@
def subscriptset(repo, subset, x, y, order):
raise error.ParseError(_("can't use a subscript in this context"))
-def listset(repo, subset, *xs):
+def listset(repo, subset, *xs, **opts):
raise error.ParseError(_("can't use a list in this context"),
hint=_('see hg help "revsets.x or y"'))
-def keyvaluepair(repo, subset, k, v):
+def keyvaluepair(repo, subset, k, v, order):
raise error.ParseError(_("can't use a key-value pair in this context"))
def func(repo, subset, a, b, order):
@@ -204,7 +253,7 @@
# repo - current repository instance
# subset - of revisions to be examined
# x - argument in tree form
-symbols = {}
+symbols = revsetlang.symbols
# symbols which can't be used for a DoS attack for any given input
# (e.g. those which accept regexes as plain strings shouldn't be included)
@@ -227,7 +276,7 @@
sourceset = getset(repo, fullreposet(repo), x)
return subset & baseset([destutil.destmerge(repo, sourceset=sourceset)])
-@predicate('adds(pattern)', safe=True)
+@predicate('adds(pattern)', safe=True, weight=30)
def adds(repo, subset, x):
"""Changesets that add a file matching pattern.
@@ -239,7 +288,7 @@
pat = getstring(x, _("adds requires a pattern"))
return checkstatus(repo, subset, pat, 1)
-@predicate('ancestor(*changeset)', safe=True)
+@predicate('ancestor(*changeset)', safe=True, weight=0.5)
def ancestor(repo, subset, x):
"""A greatest common ancestor of the changesets.
@@ -345,7 +394,7 @@
ps.add(r)
return subset & ps
-@predicate('author(string)', safe=True)
+@predicate('author(string)', safe=True, weight=10)
def author(repo, subset, x):
"""Alias for ``user(string)``.
"""
@@ -413,7 +462,7 @@
bms -= {node.nullrev}
return subset & bms
-@predicate('branch(string or set)', safe=True)
+@predicate('branch(string or set)', safe=True, weight=10)
def branch(repo, subset, x):
"""
All changesets belonging to the given branch or the branches of the given
@@ -459,14 +508,23 @@
@predicate('bumped()', safe=True)
def bumped(repo, subset, x):
+ msg = ("'bumped()' is deprecated, "
+ "use 'phasedivergent()'")
+ repo.ui.deprecwarn(msg, '4.4')
+
+ return phasedivergent(repo, subset, x)
+
+@predicate('phasedivergent()', safe=True)
+def phasedivergent(repo, subset, x):
"""Mutable changesets marked as successors of public changesets.
- Only non-public and non-obsolete changesets can be `bumped`.
+ Only non-public and non-obsolete changesets can be `phasedivergent`.
+ (EXPERIMENTAL)
"""
- # i18n: "bumped" is a keyword
- getargs(x, 0, 0, _("bumped takes no arguments"))
- bumped = obsmod.getrevs(repo, 'bumped')
- return subset & bumped
+ # i18n: "phasedivergent" is a keyword
+ getargs(x, 0, 0, _("phasedivergent takes no arguments"))
+ phasedivergent = obsmod.getrevs(repo, 'phasedivergent')
+ return subset & phasedivergent
@predicate('bundle()', safe=True)
def bundle(repo, subset, x):
@@ -537,7 +595,7 @@
cs = _children(repo, subset, s)
return subset & cs
-@predicate('closed()', safe=True)
+@predicate('closed()', safe=True, weight=10)
def closed(repo, subset, x):
"""Changeset is closed.
"""
@@ -546,7 +604,7 @@
return subset.filter(lambda r: repo[r].closesbranch(),
condrepr='<branch closed>')
-@predicate('contains(pattern)')
+@predicate('contains(pattern)', weight=100)
def contains(repo, subset, x):
"""The revision's manifest contains a file matching pattern (but might not
modify it). See :hg:`help patterns` for information about file patterns.
@@ -596,7 +654,7 @@
return subset.filter(lambda r: _matchvalue(r),
condrepr=('<converted %r>', rev))
-@predicate('date(interval)', safe=True)
+@predicate('date(interval)', safe=True, weight=10)
def date(repo, subset, x):
"""Changesets within the interval, see :hg:`help dates`.
"""
@@ -606,7 +664,7 @@
return subset.filter(lambda x: dm(repo[x].date()[0]),
condrepr=('<date %r>', ds))
-@predicate('desc(string)', safe=True)
+@predicate('desc(string)', safe=True, weight=10)
def desc(repo, subset, x):
"""Search commit message for string. The match is case-insensitive.
@@ -664,7 +722,7 @@
# Like ``descendants(set)`` but follows only the first parents.
return _descendants(repo, subset, x, followfirst=True)
-@predicate('destination([set])', safe=True)
+@predicate('destination([set])', safe=True, weight=10)
def destination(repo, subset, x):
"""Changesets that were created by a graft, transplant or rebase operation,
with the given revisions specified as the source. Omitting the optional set
@@ -711,13 +769,22 @@
@predicate('divergent()', safe=True)
def divergent(repo, subset, x):
- """
- Final successors of changesets with an alternative set of final successors.
+ msg = ("'divergent()' is deprecated, "
+ "use 'contentdivergent()'")
+ repo.ui.deprecwarn(msg, '4.4')
+
+ return contentdivergent(repo, subset, x)
+
+@predicate('contentdivergent()', safe=True)
+def contentdivergent(repo, subset, x):
"""
- # i18n: "divergent" is a keyword
- getargs(x, 0, 0, _("divergent takes no arguments"))
- divergent = obsmod.getrevs(repo, 'divergent')
- return subset & divergent
+ Final successors of changesets with an alternative set of final
+ successors. (EXPERIMENTAL)
+ """
+ # i18n: "contentdivergent" is a keyword
+ getargs(x, 0, 0, _("contentdivergent takes no arguments"))
+ contentdivergent = obsmod.getrevs(repo, 'contentdivergent')
+ return subset & contentdivergent
@predicate('extinct()', safe=True)
def extinct(repo, subset, x):
@@ -824,7 +891,7 @@
return subset & s
-@predicate('first(set, [n])', safe=True, takeorder=True)
+@predicate('first(set, [n])', safe=True, takeorder=True, weight=0)
def first(repo, subset, x, order):
"""An alias for limit().
"""
@@ -945,7 +1012,7 @@
getargs(x, 0, 0, _("all takes no arguments"))
return subset & spanset(repo) # drop "null" if any
-@predicate('grep(regex)')
+@predicate('grep(regex)', weight=10)
def grep(repo, subset, x):
"""Like ``keyword(string)`` but accepts a regex. Use ``grep(r'...')``
to ensure special escape characters are handled correctly. Unlike
@@ -1030,7 +1097,7 @@
'exclude=%r, default=%r, rev=%r>',
pats, inc, exc, default, rev))
-@predicate('file(pattern)', safe=True)
+@predicate('file(pattern)', safe=True, weight=10)
def hasfile(repo, subset, x):
"""Changesets affecting files matched by pattern.
@@ -1072,7 +1139,7 @@
hiddenrevs = repoview.filterrevs(repo, 'visible')
return subset & hiddenrevs
-@predicate('keyword(string)', safe=True)
+@predicate('keyword(string)', safe=True, weight=10)
def keyword(repo, subset, x):
"""Search commit message, user name, and names of changed files for
string. The match is case-insensitive.
@@ -1090,7 +1157,7 @@
return subset.filter(matches, condrepr=('<keyword %r>', kw))
-@predicate('limit(set[, n[, offset]])', safe=True, takeorder=True)
+@predicate('limit(set[, n[, offset]])', safe=True, takeorder=True, weight=0)
def limit(repo, subset, x, order):
"""First n members of set, defaulting to 1, starting from offset.
"""
@@ -1192,7 +1259,7 @@
pass
return baseset(datarepr=('<min %r, %r>', subset, os))
-@predicate('modifies(pattern)', safe=True)
+@predicate('modifies(pattern)', safe=True, weight=30)
def modifies(repo, subset, x):
"""Changesets modifying files matched by pattern.
@@ -1336,7 +1403,7 @@
# some optimizations from the fact this is a baseset.
return subset & o
-@predicate('outgoing([path])', safe=False)
+@predicate('outgoing([path])', safe=False, weight=10)
def outgoing(repo, subset, x):
"""Changesets not found in the specified destination repository, or the
default push location.
@@ -1490,8 +1557,8 @@
ps.add(parents[1].rev())
return subset & ps
-@predicate('present(set)', safe=True)
-def present(repo, subset, x):
+@predicate('present(set)', safe=True, takeorder=True)
+def present(repo, subset, x, order):
"""An empty set, if any revision in set isn't found; otherwise,
all revisions in set.
@@ -1500,7 +1567,7 @@
to continue even in such cases.
"""
try:
- return getset(repo, subset, x)
+ return getset(repo, subset, x, order)
except error.RepoLookupError:
return baseset()
@@ -1510,6 +1577,37 @@
getargs(x, 0, 0, "_notpublic takes no arguments")
return _phase(repo, subset, phases.draft, phases.secret)
+# for internal use
+@predicate('_phaseandancestors(phasename, set)', safe=True)
+def _phaseandancestors(repo, subset, x):
+ # equivalent to (phasename() & ancestors(set)) but more efficient
+ # phasename could be one of 'draft', 'secret', or '_notpublic'
+ args = getargs(x, 2, 2, "_phaseandancestors requires two arguments")
+ phasename = getsymbol(args[0])
+ s = getset(repo, fullreposet(repo), args[1])
+
+ draft = phases.draft
+ secret = phases.secret
+ phasenamemap = {
+ '_notpublic': draft,
+ 'draft': draft, # follow secret's ancestors
+ 'secret': secret,
+ }
+ if phasename not in phasenamemap:
+ raise error.ParseError('%r is not a valid phasename' % phasename)
+
+ minimalphase = phasenamemap[phasename]
+ getphase = repo._phasecache.phase
+
+ def cutfunc(rev):
+ return getphase(repo, rev) < minimalphase
+
+ revs = dagop.revancestors(repo, s, cutfunc=cutfunc)
+
+ if phasename == 'draft': # need to remove secret changesets
+ revs = revs.filter(lambda r: getphase(repo, r) == draft)
+ return subset & revs
+
@predicate('public()', safe=True)
def public(repo, subset, x):
"""Changeset in public phase."""
@@ -1556,7 +1654,7 @@
return baseset([r])
return baseset()
-@predicate('removes(pattern)', safe=True)
+@predicate('removes(pattern)', safe=True, weight=30)
def removes(repo, subset, x):
"""Changesets which remove files matching pattern.
@@ -1696,11 +1794,11 @@
return subset.filter(matches, condrepr=('<matching%r %r>', fields, revs))
-@predicate('reverse(set)', safe=True, takeorder=True)
+@predicate('reverse(set)', safe=True, takeorder=True, weight=0)
def reverse(repo, subset, x, order):
"""Reverse order of set.
"""
- l = getset(repo, subset, x)
+ l = getset(repo, subset, x, order)
if order == defineorder:
l.reverse()
return l
@@ -1764,7 +1862,8 @@
return args['set'], keyflags, opts
-@predicate('sort(set[, [-]key... [, ...]])', safe=True, takeorder=True)
+@predicate('sort(set[, [-]key... [, ...]])', safe=True, takeorder=True,
+ weight=10)
def sort(repo, subset, x, order):
"""Sort set by keys. The default sort order is ascending, specify a key
as ``-key`` to sort in descending order.
@@ -1784,7 +1883,7 @@
"""
s, keyflags, opts = _getsortargs(x)
- revs = getset(repo, subset, s)
+ revs = getset(repo, subset, s, order)
if not keyflags or order != defineorder:
return revs
@@ -1919,15 +2018,23 @@
@predicate('unstable()', safe=True)
def unstable(repo, subset, x):
- """Non-obsolete changesets with obsolete ancestors.
+ msg = ("'unstable()' is deprecated, "
+ "use 'orphan()'")
+ repo.ui.deprecwarn(msg, '4.4')
+
+ return orphan(repo, subset, x)
+
+@predicate('orphan()', safe=True)
+def orphan(repo, subset, x):
+ """Non-obsolete changesets with obsolete ancestors. (EXPERIMENTAL)
"""
- # i18n: "unstable" is a keyword
- getargs(x, 0, 0, _("unstable takes no arguments"))
- unstables = obsmod.getrevs(repo, 'unstable')
- return subset & unstables
+ # i18n: "orphan" is a keyword
+ getargs(x, 0, 0, _("orphan takes no arguments"))
+ orphan = obsmod.getrevs(repo, 'orphan')
+ return subset & orphan
-@predicate('user(string)', safe=True)
+@predicate('user(string)', safe=True, weight=10)
def user(repo, subset, x):
"""User name contains string. The match is case-insensitive.
@@ -1936,7 +2043,7 @@
"""
return author(repo, subset, x)
-@predicate('wdir()', safe=True)
+@predicate('wdir()', safe=True, weight=0)
def wdir(repo, subset, x):
"""Working directory. (EXPERIMENTAL)"""
# i18n: "wdir" is a keyword
@@ -1962,7 +2069,7 @@
raise ValueError
revs = [r]
except ValueError:
- revs = stringset(repo, subset, t)
+ revs = stringset(repo, subset, t, defineorder)
for r in revs:
if r in seen:
@@ -1991,7 +2098,7 @@
return baseset([r for r in ls if r in s])
# for internal use
-@predicate('_intlist', safe=True, takeorder=True)
+@predicate('_intlist', safe=True, takeorder=True, weight=0)
def _intlist(repo, subset, x, order):
if order == followorder:
# slow path to take the subset order
@@ -2026,6 +2133,7 @@
"string": stringset,
"symbol": stringset,
"and": andset,
+ "andsmally": andsmallyset,
"or": orset,
"not": notset,
"difference": differenceset,
@@ -2044,21 +2152,14 @@
# hook for extensions to execute code on the optimized tree
pass
-def match(ui, spec, repo=None, order=defineorder):
- """Create a matcher for a single revision spec
+def match(ui, spec, repo=None):
+ """Create a matcher for a single revision spec"""
+ return matchany(ui, [spec], repo=repo)
- If order=followorder, a matcher takes the ordering specified by the input
- set.
- """
- return matchany(ui, [spec], repo=repo, order=order)
-
-def matchany(ui, specs, repo=None, order=defineorder, localalias=None):
+def matchany(ui, specs, repo=None, localalias=None):
"""Create a matcher that will include any revisions matching one of the
given specs
- If order=followorder, a matcher takes the ordering specified by the input
- set.
-
If localalias is not None, it is a dict {name: definitionstring}. It takes
precedence over [revsetalias] config section.
"""
@@ -2087,17 +2188,22 @@
if aliases:
tree = revsetlang.expandaliases(tree, aliases, warn=warn)
tree = revsetlang.foldconcat(tree)
- tree = revsetlang.analyze(tree, order)
+ tree = revsetlang.analyze(tree)
tree = revsetlang.optimize(tree)
posttreebuilthook(tree, repo)
return makematcher(tree)
def makematcher(tree):
"""Create a matcher from an evaluatable tree"""
- def mfunc(repo, subset=None):
+ def mfunc(repo, subset=None, order=None):
+ if order is None:
+ if subset is None:
+ order = defineorder # 'x'
+ else:
+ order = followorder # 'subset & x'
if subset is None:
subset = fullreposet(repo)
- return getset(repo, subset, tree)
+ return getset(repo, subset, tree, order)
return mfunc
def loadpredicate(ui, extname, registrarobj):
--- a/mercurial/revsetlang.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/revsetlang.py Sat Sep 30 07:52:48 2017 -0700
@@ -49,6 +49,8 @@
keywords = {'and', 'or', 'not'}
+symbols = {}
+
_quoteletters = {'"', "'"}
_simpleopletters = set(pycompat.iterbytestr("()[]#:=,-|&+!~^%"))
@@ -78,7 +80,7 @@
letters of symbols, if ``c.isalnum() or c in '-._/@' or ord(c) > 127``.
Check that @ is a valid unquoted token character (issue3686):
- >>> list(tokenize("@::"))
+ >>> list(tokenize(b"@::"))
[('symbol', '@', 0), ('::', None, 1), ('end', None, 3)]
'''
@@ -239,79 +241,39 @@
return parser.buildargsdict(getlist(x), funcname, parser.splitargspec(keys),
keyvaluenode='keyvalue', keynode='symbol')
-def _isnamedfunc(x, funcname):
- """Check if given tree matches named function"""
- return x and x[0] == 'func' and getsymbol(x[1]) == funcname
-
-def _isposargs(x, n):
- """Check if given tree is n-length list of positional arguments"""
- l = getlist(x)
- return len(l) == n and all(y and y[0] != 'keyvalue' for y in l)
+# cache of {spec: raw parsed tree} built internally
+_treecache = {}
-def _matchnamedfunc(x, funcname):
- """Return args tree if given tree matches named function; otherwise None
+def _cachedtree(spec):
+ # thread safe because parse() is reentrant and dict.__setitem__() is atomic
+ tree = _treecache.get(spec)
+ if tree is None:
+ _treecache[spec] = tree = parse(spec)
+ return tree
- This can't be used for testing a nullary function since its args tree
- is also None. Use _isnamedfunc() instead.
- """
- if not _isnamedfunc(x, funcname):
- return
- return x[2]
+def _build(tmplspec, *repls):
+ """Create raw parsed tree from a template revset statement
-# Constants for ordering requirement, used in _analyze():
-#
-# If 'define', any nested functions and operations can change the ordering of
-# the entries in the set. If 'follow', any nested functions and operations
-# should take the ordering specified by the first operand to the '&' operator.
-#
-# For instance,
-#
-# X & (Y | Z)
-# ^ ^^^^^^^
-# | follow
-# define
-#
-# will be evaluated as 'or(y(x()), z(x()))', where 'x()' can change the order
-# of the entries in the set, but 'y()', 'z()' and 'or()' shouldn't.
-#
-# 'any' means the order doesn't matter. For instance,
-#
-# X & !Y
-# ^
-# any
-#
-# 'y()' can either enforce its ordering requirement or take the ordering
-# specified by 'x()' because 'not()' doesn't care the order.
-#
-# Transition of ordering requirement:
-#
-# 1. starts with 'define'
-# 2. shifts to 'follow' by 'x & y'
-# 3. changes back to 'define' on function call 'f(x)' or function-like
-# operation 'x (f) y' because 'f' may have its own ordering requirement
-# for 'x' and 'y' (e.g. 'first(x)')
-#
-anyorder = 'any' # don't care the order
-defineorder = 'define' # should define the order
-followorder = 'follow' # must follow the current order
+ >>> _build(b'f(_) and _', (b'string', b'1'), (b'symbol', b'2'))
+ ('and', ('func', ('symbol', 'f'), ('string', '1')), ('symbol', '2'))
+ """
+ template = _cachedtree(tmplspec)
+ return parser.buildtree(template, ('symbol', '_'), *repls)
+
+def _match(patspec, tree):
+ """Test if a tree matches the given pattern statement; return the matches
-# transition table for 'x & y', from the current expression 'x' to 'y'
-_tofolloworder = {
- anyorder: anyorder,
- defineorder: followorder,
- followorder: followorder,
-}
+ >>> _match(b'f(_)', parse(b'f()'))
+ >>> _match(b'f(_)', parse(b'f(1)'))
+ [('func', ('symbol', 'f'), ('symbol', '1')), ('symbol', '1')]
+ >>> _match(b'f(_)', parse(b'f(1, 2)'))
+ """
+ pattern = _cachedtree(patspec)
+ return parser.matchtree(pattern, tree, ('symbol', '_'),
+ {'keyvalue', 'list'})
def _matchonly(revs, bases):
- """
- >>> f = lambda *args: _matchonly(*map(parse, args))
- >>> f('ancestors(A)', 'not ancestors(B)')
- ('list', ('symbol', 'A'), ('symbol', 'B'))
- """
- ta = _matchnamedfunc(revs, 'ancestors')
- tb = bases and bases[0] == 'not' and _matchnamedfunc(bases[1], 'ancestors')
- if _isposargs(ta, 1) and _isposargs(tb, 1):
- return ('list', ta, tb)
+ return _match('ancestors(_) and not ancestors(_)', ('and', revs, bases))
def _fixops(x):
"""Rewrite raw parsed tree to resolve ambiguous syntax which cannot be
@@ -340,109 +302,90 @@
return (op,) + tuple(_fixops(y) for y in x[1:])
-def _analyze(x, order):
+def _analyze(x):
if x is None:
return x
op = x[0]
if op == 'minus':
- return _analyze(('and', x[1], ('not', x[2])), order)
+ return _analyze(_build('_ and not _', *x[1:]))
elif op == 'only':
- t = ('func', ('symbol', 'only'), ('list', x[1], x[2]))
- return _analyze(t, order)
+ return _analyze(_build('only(_, _)', *x[1:]))
elif op == 'onlypost':
- return _analyze(('func', ('symbol', 'only'), x[1]), order)
+ return _analyze(_build('only(_)', x[1]))
elif op == 'dagrangepre':
- return _analyze(('func', ('symbol', 'ancestors'), x[1]), order)
+ return _analyze(_build('ancestors(_)', x[1]))
elif op == 'dagrangepost':
- return _analyze(('func', ('symbol', 'descendants'), x[1]), order)
+ return _analyze(_build('descendants(_)', x[1]))
elif op == 'negate':
s = getstring(x[1], _("can't negate that"))
- return _analyze(('string', '-' + s), order)
+ return _analyze(('string', '-' + s))
elif op in ('string', 'symbol'):
return x
- elif op == 'and':
- ta = _analyze(x[1], order)
- tb = _analyze(x[2], _tofolloworder[order])
- return (op, ta, tb, order)
- elif op == 'or':
- return (op, _analyze(x[1], order), order)
- elif op == 'not':
- return (op, _analyze(x[1], anyorder), order)
elif op == 'rangeall':
- return (op, None, order)
- elif op in ('rangepre', 'rangepost', 'parentpost'):
- return (op, _analyze(x[1], defineorder), order)
+ return (op, None)
+ elif op in {'or', 'not', 'rangepre', 'rangepost', 'parentpost'}:
+ return (op, _analyze(x[1]))
elif op == 'group':
- return _analyze(x[1], order)
- elif op in ('dagrange', 'range', 'parent', 'ancestor', 'relation',
- 'subscript'):
- ta = _analyze(x[1], defineorder)
- tb = _analyze(x[2], defineorder)
- return (op, ta, tb, order)
+ return _analyze(x[1])
+ elif op in {'and', 'dagrange', 'range', 'parent', 'ancestor', 'relation',
+ 'subscript'}:
+ ta = _analyze(x[1])
+ tb = _analyze(x[2])
+ return (op, ta, tb)
elif op == 'relsubscript':
- ta = _analyze(x[1], defineorder)
- tb = _analyze(x[2], defineorder)
- tc = _analyze(x[3], defineorder)
- return (op, ta, tb, tc, order)
+ ta = _analyze(x[1])
+ tb = _analyze(x[2])
+ tc = _analyze(x[3])
+ return (op, ta, tb, tc)
elif op == 'list':
- return (op,) + tuple(_analyze(y, order) for y in x[1:])
+ return (op,) + tuple(_analyze(y) for y in x[1:])
elif op == 'keyvalue':
- return (op, x[1], _analyze(x[2], order))
+ return (op, x[1], _analyze(x[2]))
elif op == 'func':
- f = getsymbol(x[1])
- d = defineorder
- if f == 'present':
- # 'present(set)' is known to return the argument set with no
- # modification, so forward the current order to its argument
- d = order
- return (op, x[1], _analyze(x[2], d), order)
+ return (op, x[1], _analyze(x[2]))
raise ValueError('invalid operator %r' % op)
-def analyze(x, order=defineorder):
+def analyze(x):
"""Transform raw parsed tree to evaluatable tree which can be fed to
optimize() or getset()
All pseudo operations should be mapped to real operations or functions
defined in methods or symbols table respectively.
+ """
+ return _analyze(x)
- 'order' specifies how the current expression 'x' is ordered (see the
- constants defined above.)
- """
- return _analyze(x, order)
-
-def _optimize(x, small):
+def _optimize(x):
if x is None:
return 0, x
- smallbonus = 1
- if small:
- smallbonus = .5
-
op = x[0]
if op in ('string', 'symbol'):
- return smallbonus, x # single revisions are small
+ return 0.5, x # single revisions are small
elif op == 'and':
- wa, ta = _optimize(x[1], True)
- wb, tb = _optimize(x[2], True)
- order = x[3]
+ wa, ta = _optimize(x[1])
+ wb, tb = _optimize(x[2])
w = min(wa, wb)
+ # (draft/secret/_notpublic() & ::x) have a fast path
+ m = _match('_() & ancestors(_)', ('and', ta, tb))
+ if m and getsymbol(m[1]) in {'draft', 'secret', '_notpublic'}:
+ return w, _build('_phaseandancestors(_, _)', m[1], m[2])
+
# (::x and not ::y)/(not ::y and ::x) have a fast path
- tm = _matchonly(ta, tb) or _matchonly(tb, ta)
- if tm:
- return w, ('func', ('symbol', 'only'), tm, order)
+ m = _matchonly(ta, tb) or _matchonly(tb, ta)
+ if m:
+ return w, _build('only(_, _)', *m[1:])
- if tb is not None and tb[0] == 'not':
- return wa, ('difference', ta, tb[1], order)
-
+ m = _match('not _', tb)
+ if m:
+ return wa, ('difference', ta, m[1])
if wa > wb:
- return w, (op, tb, ta, order)
- return w, (op, ta, tb, order)
+ op = 'andsmally'
+ return w, (op, ta, tb)
elif op == 'or':
# fast path for machine-generated expression, that is likely to have
# lots of trivial revisions: 'a + b + c()' to '_list(a b) + c()'
- order = x[2]
ws, ts, ss = [], [], []
def flushss():
if not ss:
@@ -451,13 +394,13 @@
w, t = ss[0]
else:
s = '\0'.join(t[1] for w, t in ss)
- y = ('func', ('symbol', '_list'), ('string', s), order)
- w, t = _optimize(y, False)
+ y = _build('_list(_)', ('string', s))
+ w, t = _optimize(y)
ws.append(w)
ts.append(t)
del ss[:]
for y in getlist(x[1]):
- w, t = _optimize(y, False)
+ w, t = _optimize(y)
if t is not None and (t[0] == 'string' or t[0] == 'symbol'):
ss.append((w, t))
continue
@@ -467,66 +410,41 @@
flushss()
if len(ts) == 1:
return ws[0], ts[0] # 'or' operation is fully optimized out
- if order != defineorder:
- # reorder by weight only when f(a + b) == f(b + a)
- ts = [wt[1] for wt in sorted(zip(ws, ts), key=lambda wt: wt[0])]
- return max(ws), (op, ('list',) + tuple(ts), order)
+ return max(ws), (op, ('list',) + tuple(ts))
elif op == 'not':
# Optimize not public() to _notpublic() because we have a fast version
- if x[1][:3] == ('func', ('symbol', 'public'), None):
- order = x[1][3]
- newsym = ('func', ('symbol', '_notpublic'), None, order)
- o = _optimize(newsym, not small)
+ if _match('public()', x[1]):
+ o = _optimize(_build('_notpublic()'))
return o[0], o[1]
else:
- o = _optimize(x[1], not small)
- order = x[2]
- return o[0], (op, o[1], order)
+ o = _optimize(x[1])
+ return o[0], (op, o[1])
elif op == 'rangeall':
- return smallbonus, x
+ return 1, x
elif op in ('rangepre', 'rangepost', 'parentpost'):
- o = _optimize(x[1], small)
- order = x[2]
- return o[0], (op, o[1], order)
+ o = _optimize(x[1])
+ return o[0], (op, o[1])
elif op in ('dagrange', 'range'):
- wa, ta = _optimize(x[1], small)
- wb, tb = _optimize(x[2], small)
- order = x[3]
- return wa + wb, (op, ta, tb, order)
+ wa, ta = _optimize(x[1])
+ wb, tb = _optimize(x[2])
+ return wa + wb, (op, ta, tb)
elif op in ('parent', 'ancestor', 'relation', 'subscript'):
- w, t = _optimize(x[1], small)
- order = x[3]
- return w, (op, t, x[2], order)
+ w, t = _optimize(x[1])
+ return w, (op, t, x[2])
elif op == 'relsubscript':
- w, t = _optimize(x[1], small)
- order = x[4]
- return w, (op, t, x[2], x[3], order)
+ w, t = _optimize(x[1])
+ return w, (op, t, x[2], x[3])
elif op == 'list':
- ws, ts = zip(*(_optimize(y, small) for y in x[1:]))
+ ws, ts = zip(*(_optimize(y) for y in x[1:]))
return sum(ws), (op,) + ts
elif op == 'keyvalue':
- w, t = _optimize(x[2], small)
+ w, t = _optimize(x[2])
return w, (op, x[1], t)
elif op == 'func':
f = getsymbol(x[1])
- wa, ta = _optimize(x[2], small)
- if f in ('author', 'branch', 'closed', 'date', 'desc', 'file', 'grep',
- 'keyword', 'outgoing', 'user', 'destination'):
- w = 10 # slow
- elif f in ('modifies', 'adds', 'removes'):
- w = 30 # slower
- elif f == "contains":
- w = 100 # very slow
- elif f == "ancestor":
- w = 1 * smallbonus
- elif f in ('reverse', 'limit', 'first', 'wdir', '_intlist'):
- w = 0
- elif f == "sort":
- w = 10 # assume most sorts look at changelog
- else:
- w = 1
- order = x[3]
- return w + wa, (op, x[1], ta, order)
+ wa, ta = _optimize(x[2])
+ w = getattr(symbols.get(f), '_weight', 1)
+ return w + wa, (op, x[1], ta)
raise ValueError('invalid operator %r' % op)
def optimize(tree):
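
With the hardcoded weight table gone, `_optimize()` reads a `_weight` attribute off the registered predicate. A minimal sketch of how a registration decorator could attach it; this is illustrative only, not Mercurial's `registrar` module:

symbols = {}

def predicate(decl, safe=False, takeorder=False, weight=1):
    """Hypothetical registration decorator attaching a _weight attribute."""
    name = decl.partition('(')[0]
    def register(func):
        func._safe = safe
        func._takeorder = takeorder
        func._weight = weight
        symbols[name] = func
        return func
    return register

@predicate('contains(pattern)', weight=100)
def contains(repo, subset, x):
    """toy stand-in for the real predicate"""

print(getattr(symbols.get('contains'), '_weight', 1))   # 100
print(getattr(symbols.get('unknown'), '_weight', 1))    # 1 (the default)
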
@@ -534,23 +452,23 @@
All pseudo operations should be transformed beforehand.
"""
- _weight, newtree = _optimize(tree, small=True)
+ _weight, newtree = _optimize(tree)
return newtree
# the set of valid characters for the initial letter of symbols in
# alias declarations and definitions
-_aliassyminitletters = _syminitletters | set(pycompat.sysstr('$'))
+_aliassyminitletters = _syminitletters | {'$'}
def _parsewith(spec, lookup=None, syminitletters=None):
"""Generate a parse tree of given spec with given tokenizing options
- >>> _parsewith('foo($1)', syminitletters=_aliassyminitletters)
+ >>> _parsewith(b'foo($1)', syminitletters=_aliassyminitletters)
('func', ('symbol', 'foo'), ('symbol', '$1'))
- >>> _parsewith('$1')
+ >>> _parsewith(b'$1')
Traceback (most recent call last):
...
ParseError: ("syntax error in revset '$1'", 0)
- >>> _parsewith('foo bar')
+ >>> _parsewith(b'foo bar')
Traceback (most recent call last):
...
ParseError: ('invalid token', 4)
@@ -620,11 +538,11 @@
def _quote(s):
r"""Quote a value in order to make it safe for the revset engine.
- >>> _quote('asdf')
+ >>> _quote(b'asdf')
"'asdf'"
- >>> _quote("asdf'\"")
+ >>> _quote(b"asdf'\"")
'\'asdf\\\'"\''
- >>> _quote('asdf\'')
+ >>> _quote(b'asdf\'')
"'asdf\\''"
>>> _quote(1)
"'1'"
@@ -648,19 +566,19 @@
Prefixing the type with 'l' specifies a parenthesized list of that type.
- >>> formatspec('%r:: and %lr', '10 or 11', ("this()", "that()"))
+ >>> formatspec(b'%r:: and %lr', b'10 or 11', (b"this()", b"that()"))
'(10 or 11):: and ((this()) or (that()))'
- >>> formatspec('%d:: and not %d::', 10, 20)
+ >>> formatspec(b'%d:: and not %d::', 10, 20)
'10:: and not 20::'
- >>> formatspec('%ld or %ld', [], [1])
+ >>> formatspec(b'%ld or %ld', [], [1])
"_list('') or 1"
- >>> formatspec('keyword(%s)', 'foo\\xe9')
+ >>> formatspec(b'keyword(%s)', b'foo\\xe9')
"keyword('foo\\\\xe9')"
- >>> b = lambda: 'default'
+ >>> b = lambda: b'default'
>>> b.branch = b
- >>> formatspec('branch(%b)', b)
+ >>> formatspec(b'branch(%b)', b)
"branch('default')"
- >>> formatspec('root(%ls)', ['a', 'b', 'c', 'd'])
+ >>> formatspec(b'root(%ls)', [b'a', b'b', b'c', b'd'])
"root(_list('a\\x00b\\x00c\\x00d'))"
'''
--- a/mercurial/scmutil.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/scmutil.py Sat Sep 30 07:52:48 2017 -0700
@@ -19,6 +19,7 @@
from .node import (
hex,
nullid,
+ short,
wdirid,
wdirrev,
)
@@ -163,7 +164,8 @@
ui.warn(_("(lock might be very busy)\n"))
except error.LockUnavailable as inst:
ui.warn(_("abort: could not lock %s: %s\n") %
- (inst.desc or inst.filename, inst.strerror))
+ (inst.desc or inst.filename,
+ encoding.strtolocal(inst.strerror)))
except error.OutOfBandError as inst:
if inst.args:
msg = _("abort: remote error:\n")
@@ -226,16 +228,18 @@
pass
elif getattr(inst, "strerror", None):
if getattr(inst, "filename", None):
- ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
+ ui.warn(_("abort: %s: %s\n") % (
+ encoding.strtolocal(inst.strerror), inst.filename))
else:
- ui.warn(_("abort: %s\n") % inst.strerror)
+ ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
else:
raise
except OSError as inst:
if getattr(inst, "filename", None) is not None:
- ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
+ ui.warn(_("abort: %s: '%s'\n") % (
+ encoding.strtolocal(inst.strerror), inst.filename))
else:
- ui.warn(_("abort: %s\n") % inst.strerror)
+ ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
except MemoryError:
ui.warn(_("abort: out of memory\n"))
except SystemExit as inst:
@@ -273,7 +277,7 @@
if abort or warn:
msg = util.checkwinfilename(f)
if msg:
- msg = "%s: %r" % (msg, f)
+ msg = "%s: %s" % (msg, util.shellquote(f))
if abort:
raise error.Abort(msg)
ui.warn(_("warning: %s\n") % msg)
@@ -402,11 +406,25 @@
return wdirrev
return rev
-def revsingle(repo, revspec, default='.'):
+def formatchangeid(ctx):
+ """Format changectx as '{rev}:{node|formatnode}', which is the default
+ template provided by cmdutil.changeset_templater"""
+ repo = ctx.repo()
+ return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
+
+def formatrevnode(ui, rev, node):
+ """Format given revision and node depending on the current verbosity"""
+ if ui.debugflag:
+ hexfunc = hex
+ else:
+ hexfunc = short
+ return '%d:%s' % (rev, hexfunc(node))
+
+def revsingle(repo, revspec, default='.', localalias=None):
if not revspec and revspec != 0:
return repo[default]
- l = revrange(repo, [revspec])
+ l = revrange(repo, [revspec], localalias=localalias)
if not l:
raise error.Abort(_('empty revision set'))
return repo[l.last()]
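
`formatrevnode()` centralizes the '{rev}:{node}' formatting: the full 40-character hex node under --debug, the usual 12-character short form otherwise. A standalone equivalent, assuming `short()` is the first 12 hex digits as in `mercurial.node`:

from binascii import hexlify

def formatrevnode(rev, node, debug=False):
    """'{rev}:{node}' with the full hash under --debug, short form otherwise."""
    hexnode = hexlify(node).decode()
    return '%d:%s' % (rev, hexnode if debug else hexnode[:12])

node = bytes(range(20))
print(formatrevnode(5, node))              # 5:000102030405
print(formatrevnode(5, node, debug=True))  # full 40-character hash
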
@@ -445,7 +463,7 @@
return repo.lookup(first), repo.lookup(second)
-def revrange(repo, specs):
+def revrange(repo, specs, localalias=None):
"""Execute 1 to many revsets and return the union.
This is the preferred mechanism for executing revsets using user-specified
@@ -471,7 +489,7 @@
if isinstance(spec, int):
spec = revsetlang.formatspec('rev(%d)', spec)
allspecs.append(spec)
- return repo.anyrevs(allspecs, user=True)
+ return repo.anyrevs(allspecs, user=True, localalias=localalias)
def meaningfulparents(repo, ctx):
"""Return list of meaningful (or all if debug) parentrevs for rev.
@@ -550,7 +568,7 @@
'''customize where .orig files are created
Fetch user defined path from config file: [ui] origbackuppath = <path>
- Fall back to default (filepath) if not specified
+ Fall back to default (filepath with .orig suffix) if not specified
'''
origbackuppath = ui.config('ui', 'origbackuppath')
if origbackuppath is None:
@@ -564,7 +582,7 @@
ui.note(_('creating directory: %s\n') % origbackupdir)
util.makedirs(origbackupdir)
- return fullorigpath + ".orig"
+ return fullorigpath
class _containsnode(object):
"""proxy __contains__(node) to container.__contains__ which accepts revs"""
@@ -761,8 +779,8 @@
ctx = repo[None]
dirstate = repo.dirstate
- walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
- full=False)
+ walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
+ unknown=True, ignored=False, full=False)
for abs, st in walkresults.iteritems():
dstate = dirstate[abs]
if dstate == '?' and audit_path.check(abs):
--- a/mercurial/simplemerge.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/simplemerge.py Sat Sep 30 07:52:48 2017 -0700
@@ -18,15 +18,12 @@
from __future__ import absolute_import
-import os
-
from .i18n import _
from . import (
error,
mdiff,
pycompat,
util,
- vfs as vfsmod,
)
class CantReprocessAndShowBase(Exception):
@@ -397,52 +394,54 @@
return unc
-def simplemerge(ui, local, base, other, **opts):
- def readfile(filename):
- f = open(filename, "rb")
- text = f.read()
- f.close()
- if util.binary(text):
- msg = _("%s looks like a binary file.") % filename
- if not opts.get('quiet'):
- ui.warn(_('warning: %s\n') % msg)
- if not opts.get('text'):
- raise error.Abort(msg)
- return text
+def _verifytext(text, path, ui, opts):
+ """verifies that text is non-binary (unless opts[text] is passed,
+ then we just warn)"""
+ if util.binary(text):
+ msg = _("%s looks like a binary file.") % path
+ if not opts.get('quiet'):
+ ui.warn(_('warning: %s\n') % msg)
+ if not opts.get('text'):
+ raise error.Abort(msg)
+ return text
+
+def _picklabels(defaults, overrides):
+ if len(overrides) > 3:
+ raise error.Abort(_("can only specify three labels."))
+ result = defaults[:]
+ for i, override in enumerate(overrides):
+ result[i] = override
+ return result
+
+def simplemerge(ui, localctx, basectx, otherctx, **opts):
+ """Performs the simplemerge algorithm.
+
+ The merged result is written into `localctx`.
+ """
+ def readctx(ctx):
+ # Merges were always run in the working copy before, which means
+ # they used decoded data, if the user defined any repository
+ # filters.
+ #
+ # Maintain that behavior today for BC, though perhaps in the future
+ # it'd be worth considering whether merging encoded data (what the
+ # repository usually sees) might be more useful.
+ return _verifytext(ctx.decodeddata(), ctx.path(), ui, opts)
mode = opts.get('mode','merge')
- if mode == 'union':
- name_a = None
- name_b = None
- name_base = None
- else:
- name_a = local
- name_b = other
- name_base = None
- labels = opts.get('label', [])
- if len(labels) > 0:
- name_a = labels[0]
- if len(labels) > 1:
- name_b = labels[1]
- if len(labels) > 2:
- name_base = labels[2]
- if len(labels) > 3:
- raise error.Abort(_("can only specify three labels."))
+ name_a, name_b, name_base = None, None, None
+ if mode != 'union':
+ name_a, name_b, name_base = _picklabels([localctx.path(),
+ otherctx.path(), None],
+ opts.get('label', []))
try:
- localtext = readfile(local)
- basetext = readfile(base)
- othertext = readfile(other)
+ localtext = readctx(localctx)
+ basetext = readctx(basectx)
+ othertext = readctx(otherctx)
except error.Abort:
return 1
- local = os.path.realpath(local)
- if not opts.get('print'):
- opener = vfsmod.vfs(os.path.dirname(local))
- out = opener(os.path.basename(local), "w", atomictemp=True)
- else:
- out = ui.fout
-
m3 = Merge3Text(basetext, localtext, othertext)
extrakwargs = {
"localorother": opts.get("localorother", None),
@@ -456,12 +455,17 @@
extrakwargs['base_marker'] = '|||||||'
extrakwargs['name_base'] = name_base
extrakwargs['minimize'] = False
+
+ mergedtext = ""
for line in m3.merge_lines(name_a=name_a, name_b=name_b,
**pycompat.strkwargs(extrakwargs)):
- out.write(line)
+ if opts.get('print'):
+ ui.fout.write(line)
+ else:
+ mergedtext += line
if not opts.get('print'):
- out.close()
+ localctx.write(mergedtext, localctx.flags())
if m3.conflicts and not mode == 'union':
return 1
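
`_picklabels()` keeps the old label semantics in one place: up to three user-supplied labels override the defaults positionally, and more than three is an error. The same logic standalone (using ValueError in place of error.Abort):

def _picklabels(defaults, overrides):
    """Up to three labels; positions without an override keep their default."""
    if len(overrides) > 3:
        raise ValueError("can only specify three labels.")
    result = defaults[:]
    for i, override in enumerate(overrides):
        result[i] = override
    return result

print(_picklabels(['local', 'other', None], ['working copy']))
# ['working copy', 'other', None]
print(_picklabels(['local', 'other', None], ['a', 'b', 'c']))
# ['a', 'b', 'c']
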
--- a/mercurial/smartset.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/smartset.py Sat Sep 30 07:52:48 2017 -0700
@@ -357,7 +357,7 @@
def _fastsetop(self, other, op):
# try to use native set operations as fast paths
- if (type(other) is baseset and '_set' in other.__dict__ and '_set' in
+ if (type(other) is baseset and r'_set' in other.__dict__ and r'_set' in
self.__dict__ and self._ascending is not None):
s = baseset(data=getattr(self._set, op)(other._set),
istopo=self._istopo)
--- a/mercurial/sparse.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/sparse.py Sat Sep 30 07:52:48 2017 -0700
@@ -17,6 +17,7 @@
error,
match as matchmod,
merge as mergemod,
+ pathutil,
pycompat,
scmutil,
util,
@@ -616,7 +617,7 @@
def updateconfig(repo, pats, opts, include=False, exclude=False, reset=False,
delete=False, enableprofile=False, disableprofile=False,
- force=False):
+ force=False, usereporootpaths=False):
"""Perform a sparse config update.
Only one of the actions may be performed.
@@ -636,10 +637,24 @@
newexclude = set(oldexclude)
newprofiles = set(oldprofiles)
- if any(pat.startswith('/') for pat in pats):
- repo.ui.warn(_('warning: paths cannot start with /, ignoring: %s\n')
- % ([pat for pat in pats if pat.startswith('/')]))
- elif include:
+ if any(os.path.isabs(pat) for pat in pats):
+ raise error.Abort(_('paths cannot be absolute'))
+
+ if not usereporootpaths:
+ # let's treat paths as relative to cwd
+ root, cwd = repo.root, repo.getcwd()
+ abspats = []
+ for kindpat in pats:
+ kind, pat = matchmod._patsplit(kindpat, None)
+ if kind in matchmod.cwdrelativepatternkinds or kind is None:
+ ap = (kind + ':' if kind else '') +\
+ pathutil.canonpath(root, cwd, pat)
+ abspats.append(ap)
+ else:
+ abspats.append(kindpat)
+ pats = abspats
+
+ if include:
newinclude.update(pats)
elif exclude:
newexclude.update(pats)
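
Sparse patterns are now canonicalized against the cwd before being stored, unless `usereporootpaths` is set. A toy version of the `pathutil.canonpath()` step (POSIX paths assumed; the real helper also audits the result):

import os

def canonpath(root, cwd, pat):
    """Toy canonicalization: make a cwd-relative pattern repo-root-relative."""
    return os.path.relpath(os.path.join(cwd, pat), root)

root, cwd = '/repo', '/repo/src/module'
print(canonpath(root, cwd, 'foo.py'))      # src/module/foo.py
print(canonpath(root, cwd, '../common'))   # src/common
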
--- a/mercurial/sshpeer.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/sshpeer.py Sat Sep 30 07:52:48 2017 -0700
@@ -17,21 +17,6 @@
wireproto,
)
-class remotelock(object):
- def __init__(self, repo):
- self.repo = repo
- def release(self):
- self.repo.unlock()
- self.repo = None
- def __enter__(self):
- return self
- def __exit__(self, exc_type, exc_val, exc_tb):
- if self.repo:
- self.release()
- def __del__(self):
- if self.repo:
- self.release()
-
def _serverquote(s):
if not s:
return s
@@ -132,8 +117,8 @@
class sshpeer(wireproto.wirepeer):
def __init__(self, ui, path, create=False):
self._url = path
- self.ui = ui
- self.pipeo = self.pipei = self.pipee = None
+ self._ui = ui
+ self._pipeo = self._pipei = self._pipee = None
u = util.url(path, parsequery=False, parsefragment=False)
if u.scheme != 'ssh' or not u.host or u.path is None:
@@ -141,22 +126,23 @@
util.checksafessh(path)
- self.user = u.user
if u.passwd is not None:
self._abort(error.RepoError(_("password in URL not supported")))
- self.host = u.host
- self.port = u.port
- self.path = u.path or "."
+
+ self._user = u.user
+ self._host = u.host
+ self._port = u.port
+ self._path = u.path or '.'
sshcmd = self.ui.config("ui", "ssh")
remotecmd = self.ui.config("ui", "remotecmd")
- args = util.sshargs(sshcmd, self.host, self.user, self.port)
+ args = util.sshargs(sshcmd, self._host, self._user, self._port)
if create:
cmd = '%s %s %s' % (sshcmd, args,
util.shellquote("%s init %s" %
- (_serverquote(remotecmd), _serverquote(self.path))))
+ (_serverquote(remotecmd), _serverquote(self._path))))
ui.debug('running %s\n' % cmd)
res = ui.system(cmd, blockedtag='sshpeer')
if res != 0:
@@ -164,31 +150,58 @@
self._validaterepo(sshcmd, args, remotecmd)
+ # Begin of _basepeer interface.
+
+ @util.propertycache
+ def ui(self):
+ return self._ui
+
def url(self):
return self._url
+ def local(self):
+ return None
+
+ def peer(self):
+ return self
+
+ def canpush(self):
+ return True
+
+ def close(self):
+ pass
+
+ # End of _basepeer interface.
+
+ # Begin of _basewirecommands interface.
+
+ def capabilities(self):
+ return self._caps
+
+ # End of _basewirecommands interface.
+
def _validaterepo(self, sshcmd, args, remotecmd):
# cleanup up previous run
- self.cleanup()
+ self._cleanup()
cmd = '%s %s %s' % (sshcmd, args,
util.shellquote("%s -R %s serve --stdio" %
- (_serverquote(remotecmd), _serverquote(self.path))))
+ (_serverquote(remotecmd), _serverquote(self._path))))
self.ui.debug('running %s\n' % cmd)
cmd = util.quotecommand(cmd)
- # while self.subprocess isn't used, having it allows the subprocess to
+ # while self._subprocess isn't used, having it allows the subprocess
# to clean up correctly later
#
# no buffer allow the use of 'select'
# feel free to remove buffering and select usage when we ultimately
# move to threading.
sub = util.popen4(cmd, bufsize=0)
- self.pipeo, self.pipei, self.pipee, self.subprocess = sub
+ self._pipeo, self._pipei, self._pipee, self._subprocess = sub
- self.pipei = util.bufferedinputpipe(self.pipei)
- self.pipei = doublepipe(self.ui, self.pipei, self.pipee)
- self.pipeo = doublepipe(self.ui, self.pipeo, self.pipee)
+ self._pipei = util.bufferedinputpipe(self._pipei)
+ self._pipei = doublepipe(self.ui, self._pipei, self._pipee)
+ self._pipeo = doublepipe(self.ui, self._pipeo, self._pipee)
def badresponse():
self._abort(error.RepoError(_('no suitable response from '
@@ -206,7 +219,7 @@
while lines[-1] and max_noise:
try:
l = r.readline()
- self.readerr()
+ self._readerr()
if lines[-1] == "1\n" and l == "\n":
break
if l:
@@ -224,30 +237,27 @@
self._caps.update(l[:-1].split(":")[1].split())
break
- def _capabilities(self):
- return self._caps
-
- def readerr(self):
- _forwardoutput(self.ui, self.pipee)
+ def _readerr(self):
+ _forwardoutput(self.ui, self._pipee)
def _abort(self, exception):
- self.cleanup()
+ self._cleanup()
raise exception
- def cleanup(self):
- if self.pipeo is None:
+ def _cleanup(self):
+ if self._pipeo is None:
return
- self.pipeo.close()
- self.pipei.close()
+ self._pipeo.close()
+ self._pipei.close()
try:
# read the error descriptor until EOF
- for l in self.pipee:
+ for l in self._pipee:
self.ui.status(_("remote: "), l)
except (IOError, ValueError):
pass
- self.pipee.close()
+ self._pipee.close()
- __del__ = cleanup
+ __del__ = _cleanup
def _submitbatch(self, req):
rsp = self._callstream("batch", cmds=wireproto.encodebatchcmds(req))
@@ -271,7 +281,7 @@
def _callstream(self, cmd, **args):
args = pycompat.byteskwargs(args)
self.ui.debug("sending %s command\n" % cmd)
- self.pipeo.write("%s\n" % cmd)
+ self._pipeo.write("%s\n" % cmd)
_func, names = wireproto.commands[cmd]
keys = names.split()
wireargs = {}
@@ -283,16 +293,16 @@
wireargs[k] = args[k]
del args[k]
for k, v in sorted(wireargs.iteritems()):
- self.pipeo.write("%s %d\n" % (k, len(v)))
+ self._pipeo.write("%s %d\n" % (k, len(v)))
if isinstance(v, dict):
for dk, dv in v.iteritems():
- self.pipeo.write("%s %d\n" % (dk, len(dv)))
- self.pipeo.write(dv)
+ self._pipeo.write("%s %d\n" % (dk, len(dv)))
+ self._pipeo.write(dv)
else:
- self.pipeo.write(v)
- self.pipeo.flush()
+ self._pipeo.write(v)
+ self._pipeo.flush()
- return self.pipei
+ return self._pipei
def _callcompressable(self, cmd, **args):
return self._callstream(cmd, **args)
@@ -321,58 +331,29 @@
for d in iter(lambda: fp.read(4096), ''):
self._send(d)
self._send("", flush=True)
- return self.pipei
+ return self._pipei
def _getamount(self):
- l = self.pipei.readline()
+ l = self._pipei.readline()
if l == '\n':
- self.readerr()
+ self._readerr()
msg = _('check previous remote output')
self._abort(error.OutOfBandError(hint=msg))
- self.readerr()
+ self._readerr()
try:
return int(l)
except ValueError:
self._abort(error.ResponseError(_("unexpected response:"), l))
def _recv(self):
- return self.pipei.read(self._getamount())
+ return self._pipei.read(self._getamount())
def _send(self, data, flush=False):
- self.pipeo.write("%d\n" % len(data))
+ self._pipeo.write("%d\n" % len(data))
if data:
- self.pipeo.write(data)
+ self._pipeo.write(data)
if flush:
- self.pipeo.flush()
- self.readerr()
-
- def lock(self):
- self._call("lock")
- return remotelock(self)
-
- def unlock(self):
- self._call("unlock")
-
- def addchangegroup(self, cg, source, url, lock=None):
- '''Send a changegroup to the remote server. Return an integer
- similar to unbundle(). DEPRECATED, since it requires locking the
- remote.'''
- d = self._call("addchangegroup")
- if d:
- self._abort(error.RepoError(_("push refused: %s") % d))
- for d in iter(lambda: cg.read(4096), ''):
- self.pipeo.write(d)
- self.readerr()
-
- self.pipeo.flush()
-
- self.readerr()
- r = self._recv()
- if not r:
- return 1
- try:
- return int(r)
- except ValueError:
- self._abort(error.ResponseError(_("unexpected response:"), r))
+ self._pipeo.flush()
+ self._readerr()
instance = sshpeer
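
For readers following the pipeo/pipei renames above, the wire framing that _callstream() and _send() implement is just length-prefixed text over the ssh pipes. Below is a minimal standalone sketch of the client side, assuming generic file-like pipe objects; the sendcommand name and the in-memory pipes are illustrative, not Mercurial's API:

    import io

    def sendcommand(pipeo, pipei, cmd, args=None):
        # each argument is framed as b'<name> <len>\n' + value; the reply is
        # b'<len>\n' followed by exactly <len> bytes of payload
        pipeo.write(b'%s\n' % cmd)
        for k, v in sorted((args or {}).items()):
            pipeo.write(b'%s %d\n' % (k, len(v)))
            pipeo.write(v)
        pipeo.flush()
        length = int(pipei.readline())
        return pipei.read(length)

    # exercised against in-memory pipes instead of a real ssh subprocess
    pipeo, pipei = io.BytesIO(), io.BytesIO(b'4\nabcd')
    print(sendcommand(pipeo, pipei, b'heads'))   # b'abcd'
    print(pipeo.getvalue())                      # b'heads\n'

The real _callstream() additionally consults wireproto.commands for the expected argument names and special-cases dict-valued arguments; only the framing is kept here.
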
--- a/mercurial/store.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/store.py Sat Sep 30 07:52:48 2017 -0700
@@ -27,13 +27,13 @@
# foo.i or foo.d
def _encodedir(path):
'''
- >>> _encodedir('data/foo.i')
+ >>> _encodedir(b'data/foo.i')
'data/foo.i'
- >>> _encodedir('data/foo.i/bla.i')
+ >>> _encodedir(b'data/foo.i/bla.i')
'data/foo.i.hg/bla.i'
- >>> _encodedir('data/foo.i.hg/bla.i')
+ >>> _encodedir(b'data/foo.i.hg/bla.i')
'data/foo.i.hg.hg/bla.i'
- >>> _encodedir('data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
+ >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
'''
return (path
@@ -45,11 +45,11 @@
def decodedir(path):
'''
- >>> decodedir('data/foo.i')
+ >>> decodedir(b'data/foo.i')
'data/foo.i'
- >>> decodedir('data/foo.i.hg/bla.i')
+ >>> decodedir(b'data/foo.i.hg/bla.i')
'data/foo.i/bla.i'
- >>> decodedir('data/foo.i.hg.hg/bla.i')
+ >>> decodedir(b'data/foo.i.hg.hg/bla.i')
'data/foo.i.hg/bla.i'
'''
if ".hg/" not in path:
@@ -80,24 +80,24 @@
'''
>>> enc, dec = _buildencodefun()
- >>> enc('nothing/special.txt')
+ >>> enc(b'nothing/special.txt')
'nothing/special.txt'
- >>> dec('nothing/special.txt')
+ >>> dec(b'nothing/special.txt')
'nothing/special.txt'
- >>> enc('HELLO')
+ >>> enc(b'HELLO')
'_h_e_l_l_o'
- >>> dec('_h_e_l_l_o')
+ >>> dec(b'_h_e_l_l_o')
'HELLO'
- >>> enc('hello:world?')
+ >>> enc(b'hello:world?')
'hello~3aworld~3f'
- >>> dec('hello~3aworld~3f')
+ >>> dec(b'hello~3aworld~3f')
'hello:world?'
- >>> enc('the\x07quick\xADshot')
+ >>> enc(b'the\\x07quick\\xADshot')
'the~07quick~adshot'
- >>> dec('the~07quick~adshot')
+ >>> dec(b'the~07quick~adshot')
'the\\x07quick\\xadshot'
'''
e = '_'
@@ -133,14 +133,14 @@
def encodefilename(s):
'''
- >>> encodefilename('foo.i/bar.d/bla.hg/hi:world?/HELLO')
+ >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO')
'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
'''
return _encodefname(encodedir(s))
def decodefilename(s):
'''
- >>> decodefilename('foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
+ >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
'foo.i/bar.d/bla.hg/hi:world?/HELLO'
'''
return decodedir(_decodefname(s))
@@ -148,21 +148,24 @@
def _buildlowerencodefun():
'''
>>> f = _buildlowerencodefun()
- >>> f('nothing/special.txt')
+ >>> f(b'nothing/special.txt')
'nothing/special.txt'
- >>> f('HELLO')
+ >>> f(b'HELLO')
'hello'
- >>> f('hello:world?')
+ >>> f(b'hello:world?')
'hello~3aworld~3f'
- >>> f('the\x07quick\xADshot')
+ >>> f(b'the\\x07quick\\xADshot')
'the~07quick~adshot'
'''
- cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
+ xchr = pycompat.bytechr
+ cmap = dict([(xchr(x), xchr(x)) for x in xrange(127)])
for x in _reserved():
- cmap[chr(x)] = "~%02x" % x
+ cmap[xchr(x)] = "~%02x" % x
for x in range(ord("A"), ord("Z") + 1):
- cmap[chr(x)] = chr(x).lower()
- return lambda s: "".join([cmap[c] for c in s])
+ cmap[xchr(x)] = xchr(x).lower()
+ def lowerencode(s):
+ return "".join([cmap[c] for c in pycompat.iterbytestr(s)])
+ return lowerencode
lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun()
@@ -180,15 +183,15 @@
basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
doesn't need encoding.
- >>> s = '.foo/aux.txt/txt.aux/con/prn/nul/foo.'
- >>> _auxencode(s.split('/'), True)
+ >>> s = b'.foo/aux.txt/txt.aux/con/prn/nul/foo.'
+ >>> _auxencode(s.split(b'/'), True)
['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
- >>> s = '.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
- >>> _auxencode(s.split('/'), False)
+ >>> s = b'.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
+ >>> _auxencode(s.split(b'/'), False)
['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
- >>> _auxencode(['foo. '], True)
+ >>> _auxencode([b'foo. '], True)
['foo.~20']
- >>> _auxencode([' .foo'], True)
+ >>> _auxencode([b' .foo'], True)
['~20.foo']
'''
for i, n in enumerate(path):
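
All the doctest changes in store.py come down to one Python 3 pitfall: iterating a bytes object yields integers, so the encoder tables have to be built and walked with pycompat.bytechr()/iterbytestr(). Below is a simplified, self-contained flavour of the lower-encoding; the escape set is illustrative, not the exact table Mercurial builds:

    def lowerencode(path):
        # uppercase ASCII is folded to lowercase; reserved and control bytes
        # become '~xx' escapes, walking the input as integers, never as str
        reserved = frozenset(b':*?"<>|\\')
        out = bytearray()
        for byte in bytearray(path):
            if byte < 32 or byte > 126 or byte in reserved:
                out += b'~%02x' % byte
            elif ord('A') <= byte <= ord('Z'):
                out.append(byte + 32)    # fold to lowercase
            else:
                out.append(byte)
        return bytes(out)

    assert lowerencode(b'HELLO') == b'hello'
    assert lowerencode(b'hello:world?') == b'hello~3aworld~3f'
    assert lowerencode(b'the\x07quick\xadshot') == b'the~07quick~adshot'
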
--- a/mercurial/subrepo.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/subrepo.py Sat Sep 30 07:52:48 2017 -0700
@@ -134,7 +134,7 @@
# However, we still want to allow back references to go
# through unharmed, so we turn r'\\1' into r'\1'. Again,
# extra escapes are needed because re.sub string decodes.
- repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
+ repl = re.sub(br'\\\\([0-9]+)', br'\\\1', repl)
try:
src = re.sub(pattern, repl, src, 1)
except re.error as e:
@@ -1154,7 +1154,7 @@
@propertycache
def _svnversion(self):
output, err = self._svncommand(['--version', '--quiet'], filename=None)
- m = re.search(r'^(\d+)\.(\d+)', output)
+ m = re.search(br'^(\d+)\.(\d+)', output)
if not m:
raise error.Abort(_('cannot retrieve svn tool version'))
return (int(m.group(1)), int(m.group(2)))
@@ -1346,7 +1346,8 @@
genericerror = _("error executing git for subrepo '%s': %s")
notfoundhint = _("check git is installed and in your PATH")
if e.errno != errno.ENOENT:
- raise error.Abort(genericerror % (self._path, e.strerror))
+ raise error.Abort(genericerror % (
+ self._path, encoding.strtolocal(e.strerror)))
elif pycompat.osname == 'nt':
try:
self._gitexecutable = 'git.cmd'
@@ -1358,7 +1359,7 @@
hint=notfoundhint)
else:
raise error.Abort(genericerror % (self._path,
- e2.strerror))
+ encoding.strtolocal(e2.strerror)))
else:
raise error.Abort(_("couldn't find git for subrepo '%s'")
% self._path, hint=notfoundhint)
@@ -1372,11 +1373,11 @@
@staticmethod
def _gitversion(out):
- m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
+ m = re.search(br'^git version (\d+)\.(\d+)\.(\d+)', out)
if m:
return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
- m = re.search(r'^git version (\d+)\.(\d+)', out)
+ m = re.search(br'^git version (\d+)\.(\d+)', out)
if m:
return (int(m.group(1)), int(m.group(2)), 0)
@@ -1387,23 +1388,23 @@
'''ensure git version is new enough
>>> _checkversion = gitsubrepo._checkversion
- >>> _checkversion('git version 1.6.0')
+ >>> _checkversion(b'git version 1.6.0')
'ok'
- >>> _checkversion('git version 1.8.5')
+ >>> _checkversion(b'git version 1.8.5')
'ok'
- >>> _checkversion('git version 1.4.0')
+ >>> _checkversion(b'git version 1.4.0')
'abort'
- >>> _checkversion('git version 1.5.0')
+ >>> _checkversion(b'git version 1.5.0')
'warning'
- >>> _checkversion('git version 1.9-rc0')
+ >>> _checkversion(b'git version 1.9-rc0')
'ok'
- >>> _checkversion('git version 1.9.0.265.g81cdec2')
+ >>> _checkversion(b'git version 1.9.0.265.g81cdec2')
'ok'
- >>> _checkversion('git version 1.9.0.GIT')
+ >>> _checkversion(b'git version 1.9.0.GIT')
'ok'
- >>> _checkversion('git version 12345')
+ >>> _checkversion(b'git version 12345')
'unknown'
- >>> _checkversion('no')
+ >>> _checkversion(b'no')
'unknown'
'''
version = gitsubrepo._gitversion(out)
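
The br'' prefixes added above matter because git's output arrives as bytes under Python 3 and a str pattern cannot be matched against it. A minimal sketch of the same kind of probing, using a single combined pattern rather than _gitversion()'s two-step match:

    import re

    def gitversion(out):
        # 'out' is raw bytes such as b'git version 2.39.2\n'; mixing a str
        # pattern with bytes input would raise TypeError on Python 3
        m = re.search(br'^git version (\d+)\.(\d+)(?:\.(\d+))?', out)
        if not m:
            return None
        return int(m.group(1)), int(m.group(2)), int(m.group(3) or b'0')

    assert gitversion(b'git version 1.8.5') == (1, 8, 5)
    assert gitversion(b'git version 1.9-rc0') == (1, 9, 0)
    assert gitversion(b'no') is None
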
--- a/mercurial/tags.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/tags.py Sat Sep 30 07:52:48 2017 -0700
@@ -541,7 +541,7 @@
with repo.wlock():
repo.tags() # instantiate the cache
- _tag(repo.unfiltered(), names, node, message, local, user, date,
+ _tag(repo, names, node, message, local, user, date,
editor=editor)
def _tag(repo, names, node, message, local, user, date, extra=None,
--- a/mercurial/templatefilters.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/templatefilters.py Sat Sep 30 07:52:48 2017 -0700
@@ -275,19 +275,19 @@
"""Any text. Returns the name before an email address,
interpreting it as per RFC 5322.
- >>> person('foo@bar')
+ >>> person(b'foo@bar')
'foo'
- >>> person('Foo Bar <foo@bar>')
+ >>> person(b'Foo Bar <foo@bar>')
'Foo Bar'
- >>> person('"Foo Bar" <foo@bar>')
+ >>> person(b'"Foo Bar" <foo@bar>')
'Foo Bar'
- >>> person('"Foo \"buz\" Bar" <foo@bar>')
+ >>> person(b'"Foo \"buz\" Bar" <foo@bar>')
'Foo "buz" Bar'
>>> # The following are invalid, but do exist in real-life
...
- >>> person('Foo "buz" Bar <foo@bar>')
+ >>> person(b'Foo "buz" Bar <foo@bar>')
'Foo "buz" Bar'
- >>> person('"Foo Bar <foo@bar>')
+ >>> person(b'"Foo Bar <foo@bar>')
'Foo Bar'
"""
if '@' not in author:
--- a/mercurial/templatekw.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/templatekw.py Sat Sep 30 07:52:48 2017 -0700
@@ -11,7 +11,6 @@
from .node import (
hex,
nullid,
- short,
)
from . import (
@@ -49,10 +48,10 @@
return self._defaultgen()
def _defaultgen(self):
"""Generator to stringify this as {join(self, ' ')}"""
- for i, d in enumerate(self.itermaps()):
+ for i, x in enumerate(self._values):
if i > 0:
yield ' '
- yield self.joinfmt(d)
+ yield self.joinfmt(x)
def itermaps(self):
makemap = self._makemap
for x in self._values:
@@ -71,14 +70,36 @@
raise AttributeError(name)
return getattr(self._values, name)
+class _mappable(object):
+ """Wrapper for non-list/dict object to support map operation
+
+ This class allows us to handle both:
+ - "{manifest}"
+ - "{manifest % '{rev}:{node}'}"
+
+ Unlike a _hybrid, this does not simulate the behavior of the underlying
+ value. Use unwrapvalue() or unwraphybrid() to obtain the inner object.
+ """
+
+ def __init__(self, gen, value, makemap):
+ self.gen = gen
+ self._value = value # may be generator of strings
+ self._makemap = makemap
+
+ def tomap(self):
+ return self._makemap()
+
+ def itermaps(self):
+ yield self.tomap()
+
def hybriddict(data, key='key', value='value', fmt='%s=%s', gen=None):
"""Wrap data to support both dict-like and string-like operations"""
return _hybrid(gen, data, lambda k: {key: k, value: data[k]},
- lambda d: fmt % (d[key], d[value]))
+ lambda k: fmt % (k, data[k]))
def hybridlist(data, name, fmt='%s', gen=None):
"""Wrap data to support both list-like and string-like operations"""
- return _hybrid(gen, data, lambda x: {name: x}, lambda d: fmt % d[name])
+ return _hybrid(gen, data, lambda x: {name: x}, lambda x: fmt % x)
def unwraphybrid(thing):
"""Return an object which can be stringified possibly by using a legacy
@@ -87,6 +108,12 @@
return thing
return thing.gen
+def unwrapvalue(thing):
+ """Move the inner value object out of the wrapper"""
+ if not util.safehasattr(thing, '_value'):
+ return thing
+ return thing._value
+
def showdict(name, data, mapping, plural=None, key='key', value='value',
fmt='%s=%s', separator=' '):
c = [{key: k, value: v} for k, v in data.iteritems()]
@@ -163,16 +190,6 @@
if endname in templ:
yield templ(endname, **strmapping)
-def _formatrevnode(ctx):
- """Format changeset as '{rev}:{node|formatnode}', which is the default
- template provided by cmdutil.changeset_templater"""
- repo = ctx.repo()
- if repo.ui.debugflag:
- hexfunc = hex
- else:
- hexfunc = short
- return '%d:%s' % (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
-
def getfiles(repo, ctx, revcache):
if 'files' not in revcache:
revcache['files'] = repo.status(ctx.p1(), ctx)[:3]
@@ -326,7 +343,7 @@
active = repo._activebookmark
makemap = lambda v: {'bookmark': v, 'active': active, 'current': active}
f = _showlist('bookmark', bookmarks, args)
- return _hybrid(f, bookmarks, makemap, lambda x: x['bookmark'])
+ return _hybrid(f, bookmarks, makemap, pycompat.identity)
@templatekeyword('children')
def showchildren(**args):
@@ -395,7 +412,7 @@
c = [makemap(k) for k in extras]
f = _showlist('extra', c, args, plural='extras')
return _hybrid(f, extras, makemap,
- lambda x: '%s=%s' % (x['key'], util.escapestr(x['value'])))
+ lambda k: '%s=%s' % (k, util.escapestr(extras[k])))
@templatekeyword('file_adds')
def showfileadds(**args):
@@ -481,6 +498,13 @@
else:
return 'o'
+@templatekeyword('graphwidth')
+def showgraphwidth(repo, ctx, templ, **args):
+ """Integer. The width of the graph drawn by 'log --graph' or zero."""
+ # The value args['graphwidth'] will be this function, so we use an internal
+ # name to pass the value through props into this function.
+ return args.get('_graphwidth', 0)
+
@templatekeyword('index')
def showindex(**args):
"""Integer. The current iteration of the loop. (0 indexed)"""
@@ -514,7 +538,7 @@
tags = latesttags[2]
f = _showlist('latesttag', tags, args, separator=':')
- return _hybrid(f, tags, makemap, lambda x: x['latesttag'])
+ return _hybrid(f, tags, makemap, pycompat.identity)
@templatekeyword('latesttagdistance')
def showlatesttagdistance(repo, ctx, templ, cache, **args):
@@ -547,10 +571,14 @@
if mnode is None:
# just avoid crash, we might want to use the 'ff...' hash in future
return
+ mrev = repo.manifestlog._revlog.rev(mnode)
+ mhex = hex(mnode)
args = args.copy()
- args.update({r'rev': repo.manifestlog._revlog.rev(mnode),
- r'node': hex(mnode)})
- return templ('manifest', **args)
+ args.update({r'rev': mrev, r'node': mhex})
+ f = templ('manifest', **args)
+ # TODO: perhaps 'ctx' should be dropped from mapping because manifest
+ # rev and node are completely different from changeset's.
+ return _mappable(f, f, lambda: {'rev': mrev, 'node': mhex})
def shownames(namespace, **args):
"""helper method to generate a template keyword for a namespace"""
@@ -588,7 +616,7 @@
'colorname': colornames[ns],
}
- return _hybrid(f, namespaces, makemap, lambda x: x['namespace'])
+ return _hybrid(f, namespaces, makemap, pycompat.identity)
@templatekeyword('node')
def shownode(repo, ctx, templ, **args):
@@ -622,7 +650,7 @@
# no hybriddict() since d['path'] can't be formatted as a string. perhaps
# hybriddict() should call templatefilters.stringify(d[value]).
return _hybrid(None, paths, lambda k: {'name': k, 'path': paths[k]},
- lambda d: '%s=%s' % (d['name'], d['path']['url']))
+ lambda k: '%s=%s' % (k, paths[k]['url']))
@templatekeyword("predecessors")
def showpredecessors(repo, ctx, **args):
@@ -633,7 +661,7 @@
return _hybrid(None, predecessors,
lambda x: {'ctx': repo[x], 'revcache': {}},
- lambda d: _formatrevnode(d['ctx']))
+ lambda x: scmutil.formatchangeid(repo[x]))
@templatekeyword("successorssets")
def showsuccessorssets(repo, ctx, **args):
@@ -651,7 +679,7 @@
data = []
for ss in ssets:
h = _hybrid(None, ss, lambda x: {'ctx': repo[x], 'revcache': {}},
- lambda d: _formatrevnode(d['ctx']))
+ lambda x: scmutil.formatchangeid(repo[x]))
data.append(h)
# Format the successorssets
@@ -665,7 +693,49 @@
yield "; ".join(render(d) for d in data)
return _hybrid(gen(data), data, lambda x: {'successorset': x},
- lambda d: d["successorset"])
+ pycompat.identity)
+
+@templatekeyword("succsandmarkers")
+def showsuccsandmarkers(repo, ctx, **args):
+ """Returns a list of dict for each final successor of ctx.
+
+ The dict contains successors node id in "successors" keys and the list of
+ obs-markers from ctx to the set of successors in "markers"
+
+ (EXPERIMENTAL)
+ """
+
+ values = obsutil.successorsandmarkers(repo, ctx)
+
+ if values is None:
+ values = []
+
+ # Format successors and markers to avoid exposing binary to templates
+ data = []
+ for i in values:
+ # Format successors
+ successors = i['successors']
+
+ successors = [hex(n) for n in successors]
+ successors = _hybrid(None, successors,
+ lambda x: {'ctx': repo[x], 'revcache': {}},
+ lambda x: scmutil.formatchangeid(repo[x]))
+
+ # Format markers
+ finalmarkers = []
+ for m in i['markers']:
+ hexprec = hex(m[0])
+ hexsucs = tuple(hex(n) for n in m[1])
+ hexparents = None
+ if m[5] is not None:
+ hexparents = tuple(hex(n) for n in m[5])
+ newmarker = (hexprec, hexsucs) + m[2:5] + (hexparents,) + m[6:]
+ finalmarkers.append(newmarker)
+
+ data.append({'successors': successors, 'markers': finalmarkers})
+
+ f = _showlist('succsandmarkers', data, args)
+ return _hybrid(f, data, lambda x: x, pycompat.identity)
@templatekeyword('p1rev')
def showp1rev(repo, ctx, templ, **args):
@@ -710,7 +780,7 @@
for p in pctxs]
f = _showlist('parent', parents, args)
return _hybrid(f, prevs, lambda x: {'ctx': repo[int(x)], 'revcache': {}},
- lambda d: _formatrevnode(d['ctx']))
+ lambda x: scmutil.formatchangeid(repo[int(x)]))
@templatekeyword('phase')
def showphase(repo, ctx, templ, **args):
@@ -737,7 +807,7 @@
f = _showlist(name, revs, args)
return _hybrid(f, revs,
lambda x: {name: x, 'ctx': repo[int(x)], 'revcache': {}},
- lambda d: d[name])
+ pycompat.identity)
@templatekeyword('subrepos')
def showsubrepos(**args):
@@ -772,18 +842,31 @@
keywords[name] = func
@templatekeyword('termwidth')
-def termwidth(repo, ctx, templ, **args):
+def showtermwidth(repo, ctx, templ, **args):
"""Integer. The width of the current terminal."""
return repo.ui.termwidth()
@templatekeyword('troubles')
-def showtroubles(**args):
+def showtroubles(repo, **args):
"""List of strings. Evolution troubles affecting the changeset.
+ (DEPRECATED)
+ """
+ msg = ("'troubles' is deprecated, "
+ "use 'instabilities'")
+ repo.ui.deprecwarn(msg, '4.4')
+
+ return showinstabilities(repo=repo, **args)
+
+@templatekeyword('instabilities')
+def showinstabilities(**args):
+ """List of strings. Evolution instabilities affecting the changeset.
+
(EXPERIMENTAL)
"""
args = pycompat.byteskwargs(args)
- return showlist('trouble', args['ctx'].troubles(), args)
+ return showlist('instability', args['ctx'].instabilities(), args,
+ plural='instabilities')
# tell hggettext to extract docstrings from these functions:
i18nfunctions = keywords.values()
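
The recurring change in this file is the joinfmt contract: it now receives the raw list element (or dict key) instead of the pre-built mapping dict, which is why plain lists can pass pycompat.identity. Below is a stripped-down model of the wrapper (the hybrid/hybriddict stand-ins are hypothetical, not the real classes) showing how join() and '%' expansion consume the same object:

    class hybrid(object):
        # raw values plus two callbacks: makemap() builds the dict used by
        # '%' expansion, joinfmt() renders one raw value for join()
        def __init__(self, values, makemap, joinfmt):
            self._values = values
            self._makemap = makemap
            self.joinfmt = joinfmt

        def itermaps(self):             # drives "{x % '{key}={value}'}"
            for v in self._values:
                yield self._makemap(v)

    def hybriddict(data, fmt='%s=%s'):
        return hybrid(sorted(data),
                      lambda k: {'key': k, 'value': data[k]},
                      lambda k: fmt % (k, data[k]))   # raw key, not a dict

    extras = hybriddict({'branch': 'default', 'close': '1'})
    print(' '.join(extras.joinfmt(k) for k in extras._values))
    # branch=default close=1
    print(list(extras.itermaps()))
    # [{'key': 'branch', 'value': 'default'}, {'key': 'close', 'value': '1'}]
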
--- a/mercurial/templater.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/templater.py Sat Sep 30 07:52:48 2017 -0700
@@ -5,7 +5,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
import os
import re
@@ -18,6 +18,7 @@
encoding,
error,
minirst,
+ obsutil,
parser,
pycompat,
registrar,
@@ -33,7 +34,7 @@
elements = {
# token-type: binding-strength, primary, prefix, infix, suffix
"(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None),
- "%": (16, None, None, ("%", 16), None),
+ "%": (15, None, None, ("%", 15), None),
"|": (15, None, None, ("|", 15), None),
"*": (5, None, None, ("*", 5), None),
"/": (5, None, None, ("/", 5), None),
@@ -146,15 +147,15 @@
def _parsetemplate(tmpl, start, stop, quote=''):
r"""
- >>> _parsetemplate('foo{bar}"baz', 0, 12)
+ >>> _parsetemplate(b'foo{bar}"baz', 0, 12)
([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12)
- >>> _parsetemplate('foo{bar}"baz', 0, 12, quote='"')
+ >>> _parsetemplate(b'foo{bar}"baz', 0, 12, quote=b'"')
([('string', 'foo'), ('symbol', 'bar')], 9)
- >>> _parsetemplate('foo"{bar}', 0, 9, quote='"')
+ >>> _parsetemplate(b'foo"{bar}', 0, 9, quote=b'"')
([('string', 'foo')], 4)
- >>> _parsetemplate(r'foo\"bar"baz', 0, 12, quote='"')
+ >>> _parsetemplate(br'foo\"bar"baz', 0, 12, quote=b'"')
([('string', 'foo"'), ('string', 'bar')], 9)
- >>> _parsetemplate(r'foo\\"bar', 0, 10, quote='"')
+ >>> _parsetemplate(br'foo\\"bar', 0, 10, quote=b'"')
([('string', 'foo\\')], 6)
"""
parsed = []
@@ -168,7 +169,7 @@
parsed.append(('string', parser.unescapestr(tmpl[pos:stop])))
pos = stop
break
- c = tmpl[n]
+ c = tmpl[n:n + 1]
bs = (n - pos) - len(tmpl[pos:n].rstrip('\\'))
if bs % 2 == 1:
# escaped (e.g. '\{', '\\\{', but not '\\{')
@@ -191,20 +192,20 @@
"""Expand list of templates to node tuple
>>> def f(tree):
- ... print prettyformat(_unnesttemplatelist(tree))
- >>> f(('template', []))
- ('string', '')
- >>> f(('template', [('string', 'foo')]))
- ('string', 'foo')
- >>> f(('template', [('string', 'foo'), ('symbol', 'rev')]))
+ ... print(pycompat.sysstr(prettyformat(_unnesttemplatelist(tree))))
+ >>> f((b'template', []))
+ (string '')
+ >>> f((b'template', [(b'string', b'foo')]))
+ (string 'foo')
+ >>> f((b'template', [(b'string', b'foo'), (b'symbol', b'rev')]))
(template
- ('string', 'foo')
- ('symbol', 'rev'))
- >>> f(('template', [('symbol', 'rev')])) # template(rev) -> str
+ (string 'foo')
+ (symbol 'rev'))
+ >>> f((b'template', [(b'symbol', b'rev')])) # template(rev) -> str
(template
- ('symbol', 'rev'))
- >>> f(('template', [('template', [('string', 'foo')])]))
- ('string', 'foo')
+ (symbol 'rev'))
+ >>> f((b'template', [(b'template', [(b'string', b'foo')])]))
+ (string 'foo')
"""
if not isinstance(tree, tuple):
return tree
@@ -230,15 +231,15 @@
def _parseexpr(expr):
"""Parse a template expression into tree
- >>> _parseexpr('"foo"')
+ >>> _parseexpr(b'"foo"')
('string', 'foo')
- >>> _parseexpr('foo(bar)')
+ >>> _parseexpr(b'foo(bar)')
('func', ('symbol', 'foo'), ('symbol', 'bar'))
- >>> _parseexpr('foo(')
+ >>> _parseexpr(b'foo(')
Traceback (most recent call last):
...
ParseError: ('not a prefix: end', 4)
- >>> _parseexpr('"foo" "bar"')
+ >>> _parseexpr(b'"foo" "bar"')
Traceback (most recent call last):
...
ParseError: ('invalid token', 7)
@@ -297,11 +298,18 @@
else:
return None
-def evalfuncarg(context, mapping, arg):
+def evalrawexp(context, mapping, arg):
+ """Evaluate given argument as a bare template object which may require
+ further processing (such as folding generator of strings)"""
func, data = arg
- # func() may return string, generator of strings or arbitrary object such
- # as date tuple, but filter does not want generator.
- thing = func(context, mapping, data)
+ return func(context, mapping, data)
+
+def evalfuncarg(context, mapping, arg):
+ """Evaluate given argument as value type"""
+ thing = evalrawexp(context, mapping, arg)
+ thing = templatekw.unwrapvalue(thing)
+ # evalrawexp() may return string, generator of strings or arbitrary object
+ # such as date tuple, but filter does not want generator.
if isinstance(thing, types.GeneratorType):
thing = stringify(thing)
return thing
@@ -316,6 +324,7 @@
thing = util.parsebool(data)
else:
thing = func(context, mapping, data)
+ thing = templatekw.unwrapvalue(thing)
if isinstance(thing, bool):
return thing
# other objects are evaluated as strings, which means 0 is True, but
@@ -330,8 +339,7 @@
raise error.ParseError(err)
def evalstring(context, mapping, arg):
- func, data = arg
- return stringify(func(context, mapping, data))
+ return stringify(evalrawexp(context, mapping, arg))
def evalstringliteral(context, mapping, arg):
"""Evaluate given argument as string template, but returns symbol name
@@ -379,8 +387,8 @@
return (runtemplate, ctmpl)
def runtemplate(context, mapping, template):
- for func, data in template:
- yield func(context, mapping, data)
+ for arg in template:
+ yield evalrawexp(context, mapping, arg)
def buildfilter(exp, context):
n = getsymbol(exp[2])
@@ -409,21 +417,22 @@
raise error.Abort(msg)
def buildmap(exp, context):
- func, data = compileexp(exp[1], context, methods)
- tfunc, tdata = gettemplate(exp[2], context)
- return (runmap, (func, data, tfunc, tdata))
+ darg = compileexp(exp[1], context, methods)
+ targ = gettemplate(exp[2], context)
+ return (runmap, (darg, targ))
def runmap(context, mapping, data):
- func, data, tfunc, tdata = data
- d = func(context, mapping, data)
+ darg, targ = data
+ d = evalrawexp(context, mapping, darg)
if util.safehasattr(d, 'itermaps'):
diter = d.itermaps()
else:
try:
diter = iter(d)
except TypeError:
- if func is runsymbol:
- raise error.ParseError(_("keyword '%s' is not iterable") % data)
+ sym = findsymbolicname(darg)
+ if sym:
+ raise error.ParseError(_("keyword '%s' is not iterable") % sym)
else:
raise error.ParseError(_("%r is not iterable") % d)
@@ -433,7 +442,7 @@
if isinstance(v, dict):
lm.update(v)
lm['originalnode'] = mapping.get('node')
- yield tfunc(context, lm, tdata)
+ yield evalrawexp(context, lm, targ)
else:
# v is not an iterable of dicts, this happen when 'key'
# has been fully expanded already and format is useless.
@@ -488,10 +497,10 @@
... x = _parseexpr(expr)
... n = getsymbol(x[1])
... return _buildfuncargs(x[2], context, exprmethods, n, argspec)
- >>> fargs('a(l=1, k=2)', 'k l m').keys()
+ >>> list(fargs(b'a(l=1, k=2)', b'k l m').keys())
['l', 'k']
- >>> args = fargs('a(opts=1, k=2)', '**opts')
- >>> args.keys(), args['opts'].keys()
+ >>> args = fargs(b'a(opts=1, k=2)', b'**opts')
+ >>> list(args.keys()), list(args[b'opts'].keys())
(['opts'], ['opts', 'k'])
"""
def compiledict(xs):
@@ -716,9 +725,9 @@
test = evalboolean(context, mapping, args[0])
if test:
- yield args[1][0](context, mapping, args[1][1])
+ yield evalrawexp(context, mapping, args[1])
elif len(args) == 3:
- yield args[2][0](context, mapping, args[2][1])
+ yield evalrawexp(context, mapping, args[2])
@templatefunc('ifcontains(needle, haystack, then[, else])')
def ifcontains(context, mapping, args):
@@ -732,9 +741,9 @@
haystack = evalfuncarg(context, mapping, args[1])
if needle in haystack:
- yield args[2][0](context, mapping, args[2][1])
+ yield evalrawexp(context, mapping, args[2])
elif len(args) == 4:
- yield args[3][0](context, mapping, args[3][1])
+ yield evalrawexp(context, mapping, args[3])
@templatefunc('ifeq(expr1, expr2, then[, else])')
def ifeq(context, mapping, args):
@@ -747,9 +756,9 @@
test = evalstring(context, mapping, args[0])
match = evalstring(context, mapping, args[1])
if test == match:
- yield args[2][0](context, mapping, args[2][1])
+ yield evalrawexp(context, mapping, args[2])
elif len(args) == 4:
- yield args[3][0](context, mapping, args[3][1])
+ yield evalrawexp(context, mapping, args[3])
@templatefunc('join(list, sep)')
def join(context, mapping, args):
@@ -758,11 +767,11 @@
# i18n: "join" is a keyword
raise error.ParseError(_("join expects one or two arguments"))
- joinset = args[0][0](context, mapping, args[0][1])
- if util.safehasattr(joinset, 'itermaps'):
- jf = joinset.joinfmt
- joinset = [jf(x) for x in joinset.itermaps()]
-
+ # TODO: perhaps this should be evalfuncarg(), but it can't because hgweb
+ # abuses generator as a keyword that returns a list of dicts.
+ joinset = evalrawexp(context, mapping, args[0])
+ joinset = templatekw.unwrapvalue(joinset)
+ joinfmt = getattr(joinset, 'joinfmt', pycompat.identity)
joiner = " "
if len(args) > 1:
joiner = evalstring(context, mapping, args[1])
@@ -773,7 +782,7 @@
first = False
else:
yield joiner
- yield x
+ yield joinfmt(x)
@templatefunc('label(label, expr)')
def label(context, mapping, args):
@@ -839,6 +848,34 @@
tzoffset = util.makedate()[1]
return (date[0], tzoffset)
+@templatefunc('max(iterable)')
+def max_(context, mapping, args, **kwargs):
+ """Return the max of an iterable"""
+ if len(args) != 1:
+ # i18n: "max" is a keyword
+ raise error.ParseError(_("max expects one arguments"))
+
+ iterable = evalfuncarg(context, mapping, args[0])
+ try:
+ return max(iterable)
+ except (TypeError, ValueError):
+ # i18n: "max" is a keyword
+ raise error.ParseError(_("max first argument should be an iterable"))
+
+@templatefunc('min(iterable)')
+def min_(context, mapping, args, **kwargs):
+ """Return the min of an iterable"""
+ if len(args) != 1:
+ # i18n: "min" is a keyword
+ raise error.ParseError(_("min expects one arguments"))
+
+ iterable = evalfuncarg(context, mapping, args[0])
+ try:
+ return min(iterable)
+ except (TypeError, ValueError):
+ # i18n: "min" is a keyword
+ raise error.ParseError(_("min first argument should be an iterable"))
+
@templatefunc('mod(a, b)')
def mod(context, mapping, args):
"""Calculate a mod b such that a / b + a mod b == a"""
@@ -849,6 +886,74 @@
func = lambda a, b: a % b
return runarithmetic(context, mapping, (func, args[0], args[1]))
+@templatefunc('obsfateoperations(markers)')
+def obsfateoperations(context, mapping, args):
+ """Compute obsfate related information based on markers (EXPERIMENTAL)"""
+ if len(args) != 1:
+ # i18n: "obsfateoperations" is a keyword
+ raise error.ParseError(_("obsfateoperations expects one arguments"))
+
+ markers = evalfuncarg(context, mapping, args[0])
+
+ try:
+ data = obsutil.markersoperations(markers)
+ return templatekw.hybridlist(data, name='operation')
+ except (TypeError, KeyError):
+ # i18n: "obsfateoperations" is a keyword
+ errmsg = _("obsfateoperations first argument should be an iterable")
+ raise error.ParseError(errmsg)
+
+@templatefunc('obsfatedate(markers)')
+def obsfatedate(context, mapping, args):
+ """Compute obsfate related information based on markers (EXPERIMENTAL)"""
+ if len(args) != 1:
+ # i18n: "obsfatedate" is a keyword
+ raise error.ParseError(_("obsfatedate expects one arguments"))
+
+ markers = evalfuncarg(context, mapping, args[0])
+
+ try:
+ data = obsutil.markersdates(markers)
+ return templatekw.hybridlist(data, name='date', fmt='%d %d')
+ except (TypeError, KeyError):
+ # i18n: "obsfatedate" is a keyword
+ errmsg = _("obsfatedate first argument should be an iterable")
+ raise error.ParseError(errmsg)
+
+@templatefunc('obsfateusers(markers)')
+def obsfateusers(context, mapping, args):
+ """Compute obsfate related information based on markers (EXPERIMENTAL)"""
+ if len(args) != 1:
+ # i18n: "obsfateusers" is a keyword
+ raise error.ParseError(_("obsfateusers expects one arguments"))
+
+ markers = evalfuncarg(context, mapping, args[0])
+
+ try:
+ data = obsutil.markersusers(markers)
+ return templatekw.hybridlist(data, name='user')
+ except (TypeError, KeyError, ValueError):
+ # i18n: "obsfateusers" is a keyword
+ msg = _("obsfateusers first argument should be an iterable of "
+ "obsmakers")
+ raise error.ParseError(msg)
+
+@templatefunc('obsfateverb(successors)')
+def obsfateverb(context, mapping, args):
+ """Compute obsfate related information based on successors (EXPERIMENTAL)"""
+ if len(args) != 1:
+ # i18n: "obsfateverb" is a keyword
+ raise error.ParseError(_("obsfateverb expects one arguments"))
+
+ successors = evalfuncarg(context, mapping, args[0])
+
+ try:
+ return obsutil.successorsetverb(successors)
+ except TypeError:
+ # i18n: "obsfateverb" is a keyword
+ errmsg = _("obsfateverb first argument should be countable")
+ raise error.ParseError(errmsg)
+
@templatefunc('relpath(path)')
def relpath(context, mapping, args):
"""Convert a repository-absolute path into a filesystem path relative to
@@ -943,41 +1048,7 @@
# which would be unacceptably slow. so we look for hash collision in
# unfiltered space, which means some hashes may be slightly longer.
cl = mapping['ctx']._repo.unfiltered().changelog
- def isvalid(test):
- try:
- if cl._partialmatch(test) is None:
- return False
-
- try:
- i = int(test)
- # if we are a pure int, then starting with zero will not be
- # confused as a rev; or, obviously, if the int is larger than
- # the value of the tip rev
- if test[0] == '0' or i > len(cl):
- return True
- return False
- except ValueError:
- return True
- except error.RevlogError:
- return False
- except error.WdirUnsupported:
- # single 'ff...' match
- return True
-
- shortest = node
- startlength = max(6, minlength)
- length = startlength
- while True:
- test = node[:length]
- if isvalid(test):
- shortest = test
- if length == minlength or length > startlength:
- return shortest
- length -= 1
- else:
- length += 1
- if len(shortest) <= length:
- return shortest
+ return cl.shortest(node, minlength)
@templatefunc('strip(text[, chars])')
def strip(context, mapping, args):
@@ -1300,6 +1371,7 @@
def render(self, mapping):
"""Render the default unnamed template and return result as string"""
+ mapping = pycompat.strkwargs(mapping)
return stringify(self('', **mapping))
def __call__(self, t, **mapping):
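
The evaluator split above is the heart of this patch: evalrawexp() just runs a compiled (func, data) pair and may hand back a generator or wrapper, while evalfuncarg() folds that into a plain value. A toy model of the layering, using the same (func, data) convention but none of Mercurial's real types:

    import types

    def evalrawexp(context, mapping, arg):
        # run a compiled (func, data) pair and return the result untouched;
        # it may well be a generator of strings
        func, data = arg
        return func(context, mapping, data)

    def evalfuncarg(context, mapping, arg):
        # same, but fold generators so callers get a concrete value
        thing = evalrawexp(context, mapping, arg)
        if isinstance(thing, types.GeneratorType):
            thing = ''.join(thing)
        return thing

    def runsymbol(context, mapping, key):
        return mapping[key]

    def runtemplate(context, mapping, parts):
        for part in parts:              # parts are compiled (func, data) pairs
            yield evalrawexp(context, mapping, part)

    tmpl = [(runsymbol, 'rev'), (lambda c, m, d: ':', None), (runsymbol, 'node')]
    print(evalfuncarg(None, {'rev': '42', 'node': 'deadbeef'},
                      (runtemplate, tmpl)))    # 42:deadbeef
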
--- a/mercurial/templates/map-cmdline.default Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/templates/map-cmdline.default Sat Sep 30 07:52:48 2017 -0700
@@ -28,8 +28,8 @@
% ' {name} ({source})'}\n"))}'
# General templates
-_trouble_label = 'trouble.{trouble}'
-_troubles_labels = '{if(troubles, "changeset.troubled {troubles%_trouble_label}")}'
+_instability_label = 'instability.{instability}'
+_troubles_labels = '{if(instabilities, "changeset.unstable {instabilities%_instability_label}")}'
_obsolete_label = '{if(obsolete, "changeset.obsolete")}'
_cset_labels = '{separate(" ", "log.changeset", "changeset.{phase}", "{_obsolete_label}", "{_troubles_labels}")}'
cset = '{label("{_cset_labels}",
@@ -68,8 +68,8 @@
ldate = '{label("log.date",
"date: {date|date}")}\n'
-ltroubles = '{if(troubles, "{label('log.trouble',
- 'trouble: {join(troubles, ", ")}')}\n")}'
+ltroubles = '{if(instabilities, "{label('log.instability',
+ 'instability: {join(instabilities, ", ")}')}\n")}'
extra = '{label("ui.debug log.extra",
"extra: {key}={value|stringescape}")}\n'
--- a/mercurial/templates/map-cmdline.show Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/templates/map-cmdline.show Sat Sep 30 07:52:48 2017 -0700
@@ -3,12 +3,12 @@
# piggyback on existing values so color works.
# * Obsolescence isn't considered for node labels. See _cset_labels in
# map-cmdline.default.
-showbookmarks = '{if(active, "*", " ")} {pad(bookmark, longestbookmarklen + 4)}{shortest(node, 5)}\n'
+showbookmarks = '{if(active, "*", " ")} {pad(bookmark, longestbookmarklen + 4)}{shortest(node, nodelen)}\n'
showwork = '{cset_shortnode}{namespaces % cset_namespace} {cset_shortdesc}'
showstack = '{showwork}'
-cset_shortnode = '{label("log.changeset changeset.{phase}", shortest(node, 5))}'
+cset_shortnode = '{label("log.changeset changeset.{phase}", shortest(node, nodelen))}'
# Treat branch and tags specially so we don't display "default" or "tip"
cset_namespace = '{ifeq(namespace, "branches", names_branches, ifeq(namespace, "tags", names_tags, names_others))}'
--- a/mercurial/transaction.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/transaction.py Sat Sep 30 07:52:48 2017 -0700
@@ -101,7 +101,7 @@
# only pure backup file remains, it is safe to ignore any error
pass
-class transaction(object):
+class transaction(util.transactional):
def __init__(self, report, opener, vfsmap, journalname, undoname=None,
after=None, createmode=None, validator=None, releasefn=None,
checkambigfiles=None):
@@ -376,16 +376,6 @@
if self.count > 0 and self.usages == 0:
self._abort()
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- try:
- if exc_type is None:
- self.close()
- finally:
- self.release()
-
def running(self):
return self.count > 0
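
With __enter__/__exit__ removed here and inherited from util.transactional (added further down in the util.py diff), every transaction-like object shares the same "close on success, always release" behaviour. A toy context manager illustrating that contract, not the real transaction class:

    class demotransaction(object):
        def close(self):
            print('committed')
        def release(self):
            print('released (would abort if close() was never called)')
        def __enter__(self):
            return self
        def __exit__(self, exc_type, exc_val, exc_tb):
            try:
                if exc_type is None:    # only a clean exit commits
                    self.close()
            finally:
                self.release()          # always runs, even on error

    with demotransaction():
        pass                            # committed, then released
    try:
        with demotransaction():
            raise RuntimeError('boom')  # only released; the error propagates
    except RuntimeError:
        pass
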
--- a/mercurial/ui.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/ui.py Sat Sep 30 07:52:48 2017 -0700
@@ -60,7 +60,7 @@
samplehgrcs = {
'user':
-"""# example user config (see 'hg help config' for more info)
+b"""# example user config (see 'hg help config' for more info)
[ui]
# name and email, e.g.
# username = Jane Doe <jdoe@example.com>
@@ -82,7 +82,7 @@
""",
'cloned':
-"""# example repository config (see 'hg help config' for more info)
+b"""# example repository config (see 'hg help config' for more info)
[paths]
default = %s
@@ -99,7 +99,7 @@
""",
'local':
-"""# example repository config (see 'hg help config' for more info)
+b"""# example repository config (see 'hg help config' for more info)
[paths]
# path aliases to other clones of this repo in URLs or filesystem paths
# (see 'hg help config.paths' for more info)
@@ -115,7 +115,7 @@
""",
'global':
-"""# example system-wide hg config (see 'hg help config' for more info)
+b"""# example system-wide hg config (see 'hg help config' for more info)
[ui]
# uncomment to disable color in command output
@@ -531,19 +531,19 @@
def configbool(self, section, name, default=_unset, untrusted=False):
"""parse a configuration element as a boolean
- >>> u = ui(); s = 'foo'
- >>> u.setconfig(s, 'true', 'yes')
- >>> u.configbool(s, 'true')
+ >>> u = ui(); s = b'foo'
+ >>> u.setconfig(s, b'true', b'yes')
+ >>> u.configbool(s, b'true')
True
- >>> u.setconfig(s, 'false', 'no')
- >>> u.configbool(s, 'false')
+ >>> u.setconfig(s, b'false', b'no')
+ >>> u.configbool(s, b'false')
False
- >>> u.configbool(s, 'unknown')
+ >>> u.configbool(s, b'unknown')
False
- >>> u.configbool(s, 'unknown', True)
+ >>> u.configbool(s, b'unknown', True)
True
- >>> u.setconfig(s, 'invalid', 'somevalue')
- >>> u.configbool(s, 'invalid')
+ >>> u.setconfig(s, b'invalid', b'somevalue')
+ >>> u.configbool(s, b'invalid')
Traceback (most recent call last):
...
ConfigError: foo.invalid is not a boolean ('somevalue')
@@ -568,21 +568,21 @@
desc=None, untrusted=False):
"""parse a configuration element with a conversion function
- >>> u = ui(); s = 'foo'
- >>> u.setconfig(s, 'float1', '42')
- >>> u.configwith(float, s, 'float1')
+ >>> u = ui(); s = b'foo'
+ >>> u.setconfig(s, b'float1', b'42')
+ >>> u.configwith(float, s, b'float1')
42.0
- >>> u.setconfig(s, 'float2', '-4.25')
- >>> u.configwith(float, s, 'float2')
+ >>> u.setconfig(s, b'float2', b'-4.25')
+ >>> u.configwith(float, s, b'float2')
-4.25
- >>> u.configwith(float, s, 'unknown', 7)
+ >>> u.configwith(float, s, b'unknown', 7)
7.0
- >>> u.setconfig(s, 'invalid', 'somevalue')
- >>> u.configwith(float, s, 'invalid')
+ >>> u.setconfig(s, b'invalid', b'somevalue')
+ >>> u.configwith(float, s, b'invalid')
Traceback (most recent call last):
...
ConfigError: foo.invalid is not a valid float ('somevalue')
- >>> u.configwith(float, s, 'invalid', desc='womble')
+ >>> u.configwith(float, s, b'invalid', desc=b'womble')
Traceback (most recent call last):
...
ConfigError: foo.invalid is not a valid womble ('somevalue')
@@ -595,24 +595,24 @@
return convert(v)
except (ValueError, error.ParseError):
if desc is None:
- desc = convert.__name__
+ desc = pycompat.sysbytes(convert.__name__)
raise error.ConfigError(_("%s.%s is not a valid %s ('%s')")
% (section, name, desc, v))
def configint(self, section, name, default=_unset, untrusted=False):
"""parse a configuration element as an integer
- >>> u = ui(); s = 'foo'
- >>> u.setconfig(s, 'int1', '42')
- >>> u.configint(s, 'int1')
+ >>> u = ui(); s = b'foo'
+ >>> u.setconfig(s, b'int1', b'42')
+ >>> u.configint(s, b'int1')
42
- >>> u.setconfig(s, 'int2', '-42')
- >>> u.configint(s, 'int2')
+ >>> u.setconfig(s, b'int2', b'-42')
+ >>> u.configint(s, b'int2')
-42
- >>> u.configint(s, 'unknown', 7)
+ >>> u.configint(s, b'unknown', 7)
7
- >>> u.setconfig(s, 'invalid', 'somevalue')
- >>> u.configint(s, 'invalid')
+ >>> u.setconfig(s, b'invalid', b'somevalue')
+ >>> u.configint(s, b'invalid')
Traceback (most recent call last):
...
ConfigError: foo.invalid is not a valid integer ('somevalue')
@@ -627,17 +627,17 @@
Units can be specified as b (bytes), k or kb (kilobytes), m or
mb (megabytes), g or gb (gigabytes).
- >>> u = ui(); s = 'foo'
- >>> u.setconfig(s, 'val1', '42')
- >>> u.configbytes(s, 'val1')
+ >>> u = ui(); s = b'foo'
+ >>> u.setconfig(s, b'val1', b'42')
+ >>> u.configbytes(s, b'val1')
42
- >>> u.setconfig(s, 'val2', '42.5 kb')
- >>> u.configbytes(s, 'val2')
+ >>> u.setconfig(s, b'val2', b'42.5 kb')
+ >>> u.configbytes(s, b'val2')
43520
- >>> u.configbytes(s, 'unknown', '7 MB')
+ >>> u.configbytes(s, b'unknown', b'7 MB')
7340032
- >>> u.setconfig(s, 'invalid', 'somevalue')
- >>> u.configbytes(s, 'invalid')
+ >>> u.setconfig(s, b'invalid', b'somevalue')
+ >>> u.configbytes(s, b'invalid')
Traceback (most recent call last):
...
ConfigError: foo.invalid is not a byte quantity ('somevalue')
@@ -660,9 +660,9 @@
"""parse a configuration element as a list of comma/space separated
strings
- >>> u = ui(); s = 'foo'
- >>> u.setconfig(s, 'list1', 'this,is "a small" ,test')
- >>> u.configlist(s, 'list1')
+ >>> u = ui(); s = b'foo'
+ >>> u.setconfig(s, b'list1', b'this,is "a small" ,test')
+ >>> u.configlist(s, b'list1')
['this', 'is', 'a small', 'test']
"""
# default is not always a list
@@ -677,9 +677,9 @@
def configdate(self, section, name, default=_unset, untrusted=False):
"""parse a configuration element as a tuple of ints
- >>> u = ui(); s = 'foo'
- >>> u.setconfig(s, 'date', '0 0')
- >>> u.configdate(s, 'date')
+ >>> u = ui(); s = b'foo'
+ >>> u.setconfig(s, b'date', b'0 0')
+ >>> u.configdate(s, b'date')
(0, 0)
"""
if self.config(section, name, default, untrusted):
@@ -962,6 +962,7 @@
# formatted() will need some adjustment.
or not self.formatted()
or self.plain()
+ or self._buffers
# TODO: expose debugger-enabled on the UI object
or '--debugger' in pycompat.sysargv):
# We only want to paginate if the ui appears to be
@@ -1220,18 +1221,10 @@
self.write(prompt, prompt=True)
self.flush()
- # instead of trying to emulate raw_input, swap (self.fin,
- # self.fout) with (sys.stdin, sys.stdout)
- oldin = sys.stdin
- oldout = sys.stdout
- sys.stdin = self.fin
- sys.stdout = self.fout
# prompt ' ' must exist; otherwise readline may delete entire line
# - http://bugs.python.org/issue12833
with self.timeblockedsection('stdio'):
- line = raw_input(' ')
- sys.stdin = oldin
- sys.stdout = oldout
+ line = util.bytesinput(self.fin, self.fout, r' ')
# When stdin is in binary mode on Windows, it can cause
# raw_input() to emit an extra trailing carriage return
@@ -1263,11 +1256,11 @@
This returns tuple "(message, choices)", and "choices" is the
list of tuple "(response character, text without &)".
- >>> ui.extractchoices("awake? $$ &Yes $$ &No")
+ >>> ui.extractchoices(b"awake? $$ &Yes $$ &No")
('awake? ', [('y', 'Yes'), ('n', 'No')])
- >>> ui.extractchoices("line\\nbreak? $$ &Yes $$ &No")
+ >>> ui.extractchoices(b"line\\nbreak? $$ &Yes $$ &No")
('line\\nbreak? ', [('y', 'Yes'), ('n', 'No')])
- >>> ui.extractchoices("want lots of $$money$$?$$Ye&s$$N&o")
+ >>> ui.extractchoices(b"want lots of $$money$$?$$Ye&s$$N&o")
('want lots of $$money$$?', [('s', 'Yes'), ('o', 'No')])
"""
@@ -1279,9 +1272,10 @@
m = re.match(br'(?s)(.+?)\$\$([^\$]*&[^ \$].*)', prompt)
msg = m.group(1)
choices = [p.strip(' ') for p in m.group(2).split('$$')]
- return (msg,
- [(s[s.index('&') + 1].lower(), s.replace('&', '', 1))
- for s in choices])
+ def choicetuple(s):
+ ampidx = s.index('&')
+ return s[ampidx + 1:ampidx + 2].lower(), s.replace('&', '', 1)
+ return (msg, [choicetuple(s) for s in choices])
def promptchoice(self, prompt, default=0):
"""Prompt user with a message, read response, and ensure it matches
@@ -1352,20 +1346,33 @@
self.write(*msg, **opts)
def edit(self, text, user, extra=None, editform=None, pending=None,
- repopath=None):
+ repopath=None, action=None):
+ if action is None:
+ self.develwarn('action is None but will soon be a required '
+ 'parameter to ui.edit()')
extra_defaults = {
'prefix': 'editor',
'suffix': '.txt',
}
if extra is not None:
+ if extra.get('suffix') is not None:
+ self.develwarn('extra.suffix is not None but will soon be '
+ 'ignored by ui.edit()')
extra_defaults.update(extra)
extra = extra_defaults
+ if action == 'diff':
+ suffix = '.diff'
+ elif action:
+ suffix = '.%s.hg.txt' % action
+ else:
+ suffix = extra['suffix']
+
rdir = None
if self.configbool('experimental', 'editortmpinhg'):
rdir = repopath
(fd, name) = tempfile.mkstemp(prefix='hg-' + extra['prefix'] + '-',
- suffix=extra['suffix'],
+ suffix=suffix,
dir=rdir)
try:
f = os.fdopen(fd, r'wb')
@@ -1514,10 +1521,10 @@
if total:
pct = 100.0 * pos / total
- self.debug('%s:%s %s/%s%s (%4.2f%%)\n'
+ self.debug('%s:%s %d/%d%s (%4.2f%%)\n'
% (topic, item, pos, total, unit, pct))
else:
- self.debug('%s:%s %s%s\n' % (topic, item, pos, unit))
+ self.debug('%s:%s %d%s\n' % (topic, item, pos, unit))
def log(self, service, *msg, **opts):
'''hook for logging facility extensions
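
The choicetuple() change above swaps indexing for slicing so the response character stays a one-byte bytes object on Python 3. Pulled out here as a standalone function (same regex and parsing as the code above) so the doctest examples can be tried directly:

    import re

    def extractchoices(prompt):
        # split b"question $$ &Yes $$ &No" into the message and a list of
        # (response character, label without '&') pairs
        m = re.match(br'(?s)(.+?)\$\$([^\$]*&[^ \$].*)', prompt)
        msg = m.group(1)
        choices = [p.strip(b' ') for p in m.group(2).split(b'$$')]
        def choicetuple(s):
            ampidx = s.index(b'&')
            # s[i:i + 1] keeps bytes; s[i] would be an int on Python 3
            return s[ampidx + 1:ampidx + 2].lower(), s.replace(b'&', b'', 1)
        return msg, [choicetuple(s) for s in choices]

    print(extractchoices(b'awake? $$ &Yes $$ &No'))
    # (b'awake? ', [(b'y', b'Yes'), (b'n', b'No')])
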
--- a/mercurial/unionrepo.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/unionrepo.py Sat Sep 30 07:52:48 2017 -0700
@@ -126,7 +126,7 @@
def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
raise NotImplementedError
- def addgroup(self, revs, linkmapper, transaction):
+ def addgroup(self, deltas, transaction, addrevisioncb=None):
raise NotImplementedError
def strip(self, rev, minlink):
raise NotImplementedError
--- a/mercurial/util.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/util.py Sat Sep 30 07:52:48 2017 -0700
@@ -13,8 +13,9 @@
hide platform-specific details from the core.
"""
-from __future__ import absolute_import
-
+from __future__ import absolute_import, print_function
+
+import abc
import bz2
import calendar
import codecs
@@ -25,6 +26,7 @@
import gc
import hashlib
import imp
+import mmap
import os
import platform as pyplatform
import re as remod
@@ -171,6 +173,14 @@
def safehasattr(thing, attr):
return getattr(thing, attr, _notset) is not _notset
+def bytesinput(fin, fout, *args, **kwargs):
+ sin, sout = sys.stdin, sys.stdout
+ try:
+ sys.stdin, sys.stdout = encoding.strio(fin), encoding.strio(fout)
+ return encoding.strtolocal(pycompat.rawinput(*args, **kwargs))
+ finally:
+ sys.stdin, sys.stdout = sin, sout
+
def bitsfrom(container):
bits = 0
for bit in container:
@@ -218,15 +228,15 @@
This helper can be used to compute one or more digests given their name.
- >>> d = digester(['md5', 'sha1'])
- >>> d.update('foo')
+ >>> d = digester([b'md5', b'sha1'])
+ >>> d.update(b'foo')
>>> [k for k in sorted(d)]
['md5', 'sha1']
- >>> d['md5']
+ >>> d[b'md5']
'acbd18db4cc2f85cedef654fccc4a4d8'
- >>> d['sha1']
+ >>> d[b'sha1']
'0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
- >>> digester.preferred(['md5', 'sha1'])
+ >>> digester.preferred([b'md5', b'sha1'])
'sha1'
"""
@@ -398,6 +408,17 @@
self._lenbuf += len(data)
self._buffer.append(data)
+def mmapread(fp):
+ try:
+ fd = getattr(fp, 'fileno', lambda: fp)()
+ return mmap.mmap(fd, 0, access=mmap.ACCESS_READ)
+ except ValueError:
+ # Empty files cannot be mmapped, but mmapread should still work. Check
+ # if the file is empty, and if so, return an empty buffer.
+ if os.fstat(fd).st_size == 0:
+ return ''
+ raise
+
def popen2(cmd, env=None, newlines=False):
# Setting bufsize to -1 lets the system decide the buffer size.
# The default for bufsize is 0, meaning unbuffered. This leads to
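
mmapread() above maps a whole file read-only and falls back to an empty string for zero-length files, which mmap refuses to map. A quick usage sketch with a temporary file; the tempfile part is illustrative, not from the patch:

    import mmap, os, tempfile

    def mmapread(fp):                   # same shape as the helper added above
        try:
            fd = getattr(fp, 'fileno', lambda: fp)()
            return mmap.mmap(fd, 0, access=mmap.ACCESS_READ)
        except ValueError:
            if os.fstat(fd).st_size == 0:
                return b''              # empty files cannot be mmapped
            raise

    with tempfile.TemporaryFile() as fp:
        fp.write(b'hello')
        fp.flush()
        m = mmapread(fp)
        print(m[:5])                    # b'hello'
        m.close()

    with tempfile.TemporaryFile() as fp:
        print(mmapread(fp))             # b'' -- the ValueError fallback
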
@@ -439,7 +460,7 @@
``n`` can be 2, 3, or 4. Here is how some version strings map to
returned values:
- >>> v = '3.6.1+190-df9b73d2d444'
+ >>> v = b'3.6.1+190-df9b73d2d444'
>>> versiontuple(v, 2)
(3, 6)
>>> versiontuple(v, 3)
@@ -447,10 +468,10 @@
>>> versiontuple(v, 4)
(3, 6, 1, '190-df9b73d2d444')
- >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
+ >>> versiontuple(b'3.6.1+190-df9b73d2d444+20151118')
(3, 6, 1, '190-df9b73d2d444+20151118')
- >>> v = '3.6'
+ >>> v = b'3.6'
>>> versiontuple(v, 2)
(3, 6)
>>> versiontuple(v, 3)
@@ -458,7 +479,7 @@
>>> versiontuple(v, 4)
(3, 6, None, None)
- >>> v = '3.9-rc'
+ >>> v = b'3.9-rc'
>>> versiontuple(v, 2)
(3, 9)
>>> versiontuple(v, 3)
@@ -466,7 +487,7 @@
>>> versiontuple(v, 4)
(3, 9, None, 'rc')
- >>> v = '3.9-rc+2-02a8fea4289b'
+ >>> v = b'3.9-rc+2-02a8fea4289b'
>>> versiontuple(v, 2)
(3, 9)
>>> versiontuple(v, 3)
@@ -567,15 +588,33 @@
return f
+class cow(object):
+ """helper class to make copy-on-write easier
+
+ Call preparewrite before doing any writes.
+ """
+
+ def preparewrite(self):
+ """call this before writes, return self or a copied new object"""
+ if getattr(self, '_copied', 0):
+ self._copied -= 1
+ return self.__class__(self)
+ return self
+
+ def copy(self):
+ """always do a cheap copy"""
+ self._copied = getattr(self, '_copied', 0) + 1
+ return self
+
class sortdict(collections.OrderedDict):
'''a simple sorted dictionary
- >>> d1 = sortdict([('a', 0), ('b', 1)])
+ >>> d1 = sortdict([(b'a', 0), (b'b', 1)])
>>> d2 = d1.copy()
>>> d2
sortdict([('a', 0), ('b', 1)])
- >>> d2.update([('a', 2)])
- >>> d2.keys() # should still be in last-set order
+ >>> d2.update([(b'a', 2)])
+ >>> list(d2.keys()) # should still be in last-set order
['b', 'a']
'''
@@ -592,6 +631,63 @@
for k, v in src:
self[k] = v
+class cowdict(cow, dict):
+ """copy-on-write dict
+
+ Be sure to call d = d.preparewrite() before writing to d.
+
+ >>> a = cowdict()
+ >>> a is a.preparewrite()
+ True
+ >>> b = a.copy()
+ >>> b is a
+ True
+ >>> c = b.copy()
+ >>> c is a
+ True
+ >>> a = a.preparewrite()
+ >>> b is a
+ False
+ >>> a is a.preparewrite()
+ True
+ >>> c = c.preparewrite()
+ >>> b is c
+ False
+ >>> b is b.preparewrite()
+ True
+ """
+
+class cowsortdict(cow, sortdict):
+ """copy-on-write sortdict
+
+ Be sure to call d = d.preparewrite() before writing to d.
+ """
+
+class transactional(object):
+ """Base class for making a transactional type into a context manager."""
+ __metaclass__ = abc.ABCMeta
+
+ @abc.abstractmethod
+ def close(self):
+ """Successfully closes the transaction."""
+
+ @abc.abstractmethod
+ def release(self):
+ """Marks the end of the transaction.
+
+ If the transaction has not been closed, it will be aborted.
+ """
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ try:
+ if exc_type is None:
+ self.close()
+ finally:
+ self.release()
+
@contextlib.contextmanager
def acceptintervention(tr=None):
"""A context manager that closes the transaction on InterventionRequired
@@ -610,6 +706,10 @@
finally:
tr.release()
+@contextlib.contextmanager
+def nullcontextmanager():
+ yield
+
class _lrucachenode(object):
"""A node in a doubly linked list.
@@ -934,10 +1034,9 @@
into. As a workaround, disable GC while building complex (huge)
containers.
- This garbage collector issue have been fixed in 2.7.
+ This garbage collector issue has been fixed in 2.7, but it still affects
+ CPython's performance.
"""
- if sys.version_info >= (2, 7):
- return func
def wrapper(*args, **kwargs):
gcenabled = gc.isenabled()
gc.disable()
@@ -948,6 +1047,10 @@
gc.enable()
return wrapper
+if pycompat.ispypy:
+ # PyPy runs slower with gc disabled
+ nogc = lambda x: x
+
def pathto(root, n1, n2):
'''return the relative path from one place to another.
root should use os.sep to separate directories
@@ -1189,32 +1292,34 @@
return hardlink, num
-_winreservednames = b'''con prn aux nul
- com1 com2 com3 com4 com5 com6 com7 com8 com9
- lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
+_winreservednames = {
+ 'con', 'prn', 'aux', 'nul',
+ 'com1', 'com2', 'com3', 'com4', 'com5', 'com6', 'com7', 'com8', 'com9',
+ 'lpt1', 'lpt2', 'lpt3', 'lpt4', 'lpt5', 'lpt6', 'lpt7', 'lpt8', 'lpt9',
+}
_winreservedchars = ':*?"<>|'
def checkwinfilename(path):
r'''Check that the base-relative path is a valid filename on Windows.
Returns None if the path is ok, or a UI string describing the problem.
- >>> checkwinfilename("just/a/normal/path")
- >>> checkwinfilename("foo/bar/con.xml")
+ >>> checkwinfilename(b"just/a/normal/path")
+ >>> checkwinfilename(b"foo/bar/con.xml")
"filename contains 'con', which is reserved on Windows"
- >>> checkwinfilename("foo/con.xml/bar")
+ >>> checkwinfilename(b"foo/con.xml/bar")
"filename contains 'con', which is reserved on Windows"
- >>> checkwinfilename("foo/bar/xml.con")
- >>> checkwinfilename("foo/bar/AUX/bla.txt")
+ >>> checkwinfilename(b"foo/bar/xml.con")
+ >>> checkwinfilename(b"foo/bar/AUX/bla.txt")
"filename contains 'AUX', which is reserved on Windows"
- >>> checkwinfilename("foo/bar/bla:.txt")
+ >>> checkwinfilename(b"foo/bar/bla:.txt")
"filename contains ':', which is reserved on Windows"
- >>> checkwinfilename("foo/bar/b\07la.txt")
+ >>> checkwinfilename(b"foo/bar/b\07la.txt")
"filename contains '\\x07', which is invalid on Windows"
- >>> checkwinfilename("foo/bar/bla ")
+ >>> checkwinfilename(b"foo/bar/bla ")
"filename ends with ' ', which is not allowed on Windows"
- >>> checkwinfilename("../bar")
- >>> checkwinfilename("foo\\")
+ >>> checkwinfilename(b"../bar")
+ >>> checkwinfilename(b"foo\\")
"filename ends with '\\', which is invalid on Windows"
- >>> checkwinfilename("foo\\/bar")
+ >>> checkwinfilename(b"foo\\/bar")
"directory name ends with '\\', which is invalid on Windows"
'''
if path.endswith('\\'):
@@ -1229,13 +1334,13 @@
return _("filename contains '%s', which is reserved "
"on Windows") % c
if ord(c) <= 31:
- return _("filename contains %r, which is invalid "
- "on Windows") % c
+ return _("filename contains '%s', which is invalid "
+ "on Windows") % escapestr(c)
base = n.split('.')[0]
if base and base.lower() in _winreservednames:
return _("filename contains '%s', which is reserved "
"on Windows") % base
- t = n[-1]
+ t = n[-1:]
if t in '. ' and n not in '..':
return _("filename ends with '%s', which is not allowed "
"on Windows") % t
@@ -1414,34 +1519,27 @@
# testfile may be open, so we need a separate file for checking to
# work around issue2543 (or testfile may get lost on Samba shares)
- f1 = testfile + ".hgtmp1"
- if os.path.lexists(f1):
- return False
+ f1, f2, fp = None, None, None
try:
- posixfile(f1, 'w').close()
- except IOError:
- try:
- os.unlink(f1)
- except OSError:
- pass
- return False
-
- f2 = testfile + ".hgtmp2"
- fd = None
- try:
+ fd, f1 = tempfile.mkstemp(prefix='.%s-' % os.path.basename(testfile),
+ suffix='1~', dir=os.path.dirname(testfile))
+ os.close(fd)
+ f2 = '%s2~' % f1[:-2]
+
oslink(f1, f2)
# nlinks() may behave differently for files on Windows shares if
# the file is open.
- fd = posixfile(f2)
+ fp = posixfile(f2)
return nlinks(f2) > 1
except OSError:
return False
finally:
- if fd is not None:
- fd.close()
+ if fp is not None:
+ fp.close()
for f in (f1, f2):
try:
- os.unlink(f)
+ if f is not None:
+ os.unlink(f)
except OSError:
pass
@@ -1484,7 +1582,7 @@
Returns the name of the temporary file.
"""
d, fn = os.path.split(name)
- fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
+ fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, suffix='~', dir=d)
os.close(fd)
# Temporary files are created with mode 0600, which is usually not
# what we want. If the original file already exists, just copy
@@ -1958,15 +2056,15 @@
The date may be a "unixtime offset" string or in one of the specified
formats. If the date already is a (unixtime, offset) tuple, it is returned.
- >>> parsedate(' today ') == parsedate(\
- datetime.date.today().strftime('%b %d'))
+ >>> parsedate(b' today ') == parsedate(
+ ... datetime.date.today().strftime('%b %d').encode('ascii'))
True
- >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
- datetime.timedelta(days=1)\
- ).strftime('%b %d'))
+ >>> parsedate(b'yesterday ') == parsedate(
+ ... (datetime.date.today() - datetime.timedelta(days=1)
+ ... ).strftime('%b %d').encode('ascii'))
True
>>> now, tz = makedate()
- >>> strnow, strtz = parsedate('now')
+ >>> strnow, strtz = parsedate(b'now')
>>> (strnow - now) < 1
True
>>> tz == strtz
@@ -1985,10 +2083,12 @@
if date == 'now' or date == _('now'):
return makedate()
if date == 'today' or date == _('today'):
- date = datetime.date.today().strftime('%b %d')
+ date = datetime.date.today().strftime(r'%b %d')
+ date = encoding.strtolocal(date)
elif date == 'yesterday' or date == _('yesterday'):
date = (datetime.date.today() -
- datetime.timedelta(days=1)).strftime('%b %d')
+ datetime.timedelta(days=1)).strftime(r'%b %d')
+ date = encoding.strtolocal(date)
try:
when, offset = map(int, date.split(' '))
@@ -2040,12 +2140,12 @@
'>{date}' on or after a given date
- >>> p1 = parsedate("10:29:59")
- >>> p2 = parsedate("10:30:00")
- >>> p3 = parsedate("10:30:59")
- >>> p4 = parsedate("10:31:00")
- >>> p5 = parsedate("Sep 15 10:30:00 1999")
- >>> f = matchdate("10:30")
+ >>> p1 = parsedate(b"10:29:59")
+ >>> p2 = parsedate(b"10:30:00")
+ >>> p3 = parsedate(b"10:30:59")
+ >>> p4 = parsedate(b"10:31:00")
+ >>> p5 = parsedate(b"Sep 15 10:30:00 1999")
+ >>> f = matchdate(b"10:30")
>>> f(p1[0])
False
>>> f(p2[0])
@@ -2120,27 +2220,27 @@
... return (kind, pattern, [bool(matcher(t)) for t in tests])
exact matching (no prefix):
- >>> test('abcdefg', 'abc', 'def', 'abcdefg')
+ >>> test(b'abcdefg', b'abc', b'def', b'abcdefg')
('literal', 'abcdefg', [False, False, True])
regex matching ('re:' prefix)
- >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
+ >>> test(b're:a.+b', b'nomatch', b'fooadef', b'fooadefbar')
('re', 'a.+b', [False, False, True])
force exact matches ('literal:' prefix)
- >>> test('literal:re:foobar', 'foobar', 're:foobar')
+ >>> test(b'literal:re:foobar', b'foobar', b're:foobar')
('literal', 're:foobar', [False, True])
unknown prefixes are ignored and treated as literals
- >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
+ >>> test(b'foo:bar', b'foo', b'bar', b'foo:bar')
('literal', 'foo:bar', [False, False, True])
case insensitive regex matches
- >>> itest('re:A.+b', 'nomatch', 'fooadef', 'fooadefBar')
+ >>> itest(b're:A.+b', b'nomatch', b'fooadef', b'fooadefBar')
('re', 'A.+b', [False, False, True])
case insensitive literal matches
- >>> itest('ABCDEFG', 'abc', 'def', 'abcdefg')
+ >>> itest(b'ABCDEFG', b'abc', b'def', b'abcdefg')
('literal', 'ABCDEFG', [False, False, True])
"""
if pattern.startswith('re:'):
@@ -2272,6 +2372,15 @@
def unescapestr(s):
return codecs.escape_decode(s)[0]
+def forcebytestr(obj):
+ """Portably format an arbitrary object (e.g. exception) into a byte
+ string."""
+ try:
+ return pycompat.bytestr(obj)
+ except UnicodeEncodeError:
+ # non-ascii string, may be lossy
+ return pycompat.bytestr(encoding.strtolocal(str(obj)))
+
def uirepr(s):
# Avoid double backslash in Windows path repr()
return repr(s).replace('\\\\', '\\')
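
forcebytestr() above gives callers one portable way to turn an arbitrary object, typically an exception, into a byte string. A rough standalone approximation (pycompat.bytestr and encoding.strtolocal are Mercurial helpers; plain str/encode is used here instead, so the charset handling is only an approximation):

def forcebytestr(obj):
    """Format an arbitrary object (e.g. an exception) as bytes."""
    s = str(obj)
    try:
        return s.encode('ascii')
    except UnicodeEncodeError:
        # non-ascii message, may be lossy
        return s.encode('utf-8', 'replace')

# e.g. forcebytestr(OSError(2, 'No such file')) == b'[Errno 2] No such file'
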
@@ -2604,55 +2713,55 @@
Examples:
- >>> url('http://www.ietf.org/rfc/rfc2396.txt')
+ >>> url(b'http://www.ietf.org/rfc/rfc2396.txt')
<url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
- >>> url('ssh://[::1]:2200//home/joe/repo')
+ >>> url(b'ssh://[::1]:2200//home/joe/repo')
<url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
- >>> url('file:///home/joe/repo')
+ >>> url(b'file:///home/joe/repo')
<url scheme: 'file', path: '/home/joe/repo'>
- >>> url('file:///c:/temp/foo/')
+ >>> url(b'file:///c:/temp/foo/')
<url scheme: 'file', path: 'c:/temp/foo/'>
- >>> url('bundle:foo')
+ >>> url(b'bundle:foo')
<url scheme: 'bundle', path: 'foo'>
- >>> url('bundle://../foo')
+ >>> url(b'bundle://../foo')
<url scheme: 'bundle', path: '../foo'>
- >>> url(r'c:\foo\bar')
+ >>> url(br'c:\foo\bar')
<url path: 'c:\\foo\\bar'>
- >>> url(r'\\blah\blah\blah')
+ >>> url(br'\\blah\blah\blah')
<url path: '\\\\blah\\blah\\blah'>
- >>> url(r'\\blah\blah\blah#baz')
+ >>> url(br'\\blah\blah\blah#baz')
<url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
- >>> url(r'file:///C:\users\me')
+ >>> url(br'file:///C:\users\me')
<url scheme: 'file', path: 'C:\\users\\me'>
Authentication credentials:
- >>> url('ssh://joe:xyz@x/repo')
+ >>> url(b'ssh://joe:xyz@x/repo')
<url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
- >>> url('ssh://joe@x/repo')
+ >>> url(b'ssh://joe@x/repo')
<url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
Query strings and fragments:
- >>> url('http://host/a?b#c')
+ >>> url(b'http://host/a?b#c')
<url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
- >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
+ >>> url(b'http://host/a?b#c', parsequery=False, parsefragment=False)
<url scheme: 'http', host: 'host', path: 'a?b#c'>
Empty path:
- >>> url('')
+ >>> url(b'')
<url path: ''>
- >>> url('#a')
+ >>> url(b'#a')
<url path: '', fragment: 'a'>
- >>> url('http://host/')
+ >>> url(b'http://host/')
<url scheme: 'http', host: 'host', path: ''>
- >>> url('http://host/#a')
+ >>> url(b'http://host/#a')
<url scheme: 'http', host: 'host', path: '', fragment: 'a'>
Only scheme:
- >>> url('http:')
+ >>> url(b'http:')
<url scheme: 'http'>
"""
@@ -2752,6 +2861,7 @@
if v is not None:
setattr(self, a, urlreq.unquote(v))
+ @encoding.strmethod
def __repr__(self):
attrs = []
for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
@@ -2766,33 +2876,33 @@
Examples:
- >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
+ >>> bytes(url(b'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
- >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
+ >>> bytes(url(b'http://user:pw@host:80/?foo=bar&baz=42'))
'http://user:pw@host:80/?foo=bar&baz=42'
- >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
+ >>> bytes(url(b'http://user:pw@host:80/?foo=bar%3dbaz'))
'http://user:pw@host:80/?foo=bar%3dbaz'
- >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
+ >>> bytes(url(b'ssh://user:pw@[::1]:2200//home/joe#'))
'ssh://user:pw@[::1]:2200//home/joe#'
- >>> str(url('http://localhost:80//'))
+ >>> bytes(url(b'http://localhost:80//'))
'http://localhost:80//'
- >>> str(url('http://localhost:80/'))
+ >>> bytes(url(b'http://localhost:80/'))
'http://localhost:80/'
- >>> str(url('http://localhost:80'))
+ >>> bytes(url(b'http://localhost:80'))
'http://localhost:80/'
- >>> str(url('bundle:foo'))
+ >>> bytes(url(b'bundle:foo'))
'bundle:foo'
- >>> str(url('bundle://../foo'))
+ >>> bytes(url(b'bundle://../foo'))
'bundle:../foo'
- >>> str(url('path'))
+ >>> bytes(url(b'path'))
'path'
- >>> str(url('file:///tmp/foo/bar'))
+ >>> bytes(url(b'file:///tmp/foo/bar'))
'file:///tmp/foo/bar'
- >>> str(url('file:///c:/tmp/foo/bar'))
+ >>> bytes(url(b'file:///c:/tmp/foo/bar'))
'file:///c:/tmp/foo/bar'
- >>> print url(r'bundle:foo\bar')
+ >>> print(url(br'bundle:foo\bar'))
bundle:foo\bar
- >>> print url(r'file:///D:\data\hg')
+ >>> print(url(br'file:///D:\data\hg'))
file:///D:\data\hg
"""
if self._localpath:
@@ -2971,11 +3081,11 @@
def sizetoint(s):
'''Convert a space specifier to a byte count.
- >>> sizetoint('30')
+ >>> sizetoint(b'30')
30
- >>> sizetoint('2.2kb')
+ >>> sizetoint(b'2.2kb')
2252
- >>> sizetoint('6M')
+ >>> sizetoint(b'6M')
6291456
'''
t = s.strip().lower()
@@ -3710,10 +3820,14 @@
value = docobject()
value.__doc__ = doc
+ value._origdoc = engine.bundletype.__doc__
+ value._origfunc = engine.bundletype
items[bt[0]] = value
return items
+i18nfunctions = bundlecompressiontopics().values()
+
# convenient shortcut
dst = debugstacktrace
--- a/mercurial/vfs.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/vfs.py Sat Sep 30 07:52:48 2017 -0700
@@ -16,6 +16,7 @@
from .i18n import _
from . import (
+ encoding,
error,
pathutil,
pycompat,
@@ -434,7 +435,8 @@
os.symlink(src, linkname)
except OSError as err:
raise OSError(err.errno, _('could not symlink to %r: %s') %
- (src, err.strerror), linkname)
+ (src, encoding.strtolocal(err.strerror)),
+ linkname)
else:
self.write(dst, src)
--- a/mercurial/win32.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/win32.py Sat Sep 30 07:52:48 2017 -0700
@@ -286,7 +286,8 @@
if code > 0x7fffffff:
code -= 2**32
err = ctypes.WinError(code=code)
- raise OSError(err.errno, '%s: %s' % (name, err.strerror))
+ raise OSError(err.errno, '%s: %s' % (name,
+ encoding.strtolocal(err.strerror)))
def _getfileinfo(name):
fh = _kernel32.CreateFileA(name, 0,
--- a/mercurial/windows.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/windows.py Sat Sep 30 07:52:48 2017 -0700
@@ -137,7 +137,8 @@
return fp
except WindowsError as err:
# convert to a friendlier exception
- raise IOError(err.errno, '%s: %s' % (name, err.strerror))
+ raise IOError(err.errno, '%s: %s' % (
+ name, encoding.strtolocal(err.strerror)))
# may be wrapped by win32mbcs extension
listdir = osutil.listdir
@@ -266,15 +267,15 @@
_needsshellquote = None
def shellquote(s):
r"""
- >>> shellquote(r'C:\Users\xyz')
+ >>> shellquote(br'C:\Users\xyz')
'"C:\\Users\\xyz"'
- >>> shellquote(r'C:\Users\xyz/mixed')
+ >>> shellquote(br'C:\Users\xyz/mixed')
'"C:\\Users\\xyz/mixed"'
>>> # Would be safe not to quote too, since it is all double backslashes
- >>> shellquote(r'C:\\Users\\xyz')
+ >>> shellquote(br'C:\\Users\\xyz')
'"C:\\\\Users\\\\xyz"'
>>> # But this must be quoted
- >>> shellquote(r'C:\\Users\\xyz/abc')
+ >>> shellquote(br'C:\\Users\\xyz/abc')
'"C:\\\\Users\\\\xyz/abc"'
"""
global _quotere
--- a/mercurial/wireproto.py Mon Sep 18 10:54:00 2017 -0700
+++ b/mercurial/wireproto.py Sat Sep 30 07:52:48 2017 -0700
@@ -8,7 +8,6 @@
from __future__ import absolute_import
import hashlib
-import itertools
import os
import tempfile
@@ -22,12 +21,14 @@
from . import (
bundle2,
changegroup as changegroupmod,
+ discovery,
encoding,
error,
exchange,
peer,
pushkey as pushkeymod,
pycompat,
+ repository,
streamclone,
util,
)
@@ -80,49 +81,19 @@
# """
# raise NotImplementedError()
-class remotebatch(peer.batcher):
- '''batches the queued calls; uses as few roundtrips as possible'''
- def __init__(self, remote):
- '''remote must support _submitbatch(encbatch) and
- _submitone(op, encargs)'''
- peer.batcher.__init__(self)
- self.remote = remote
- def submit(self):
- req, rsp = [], []
- for name, args, opts, resref in self.calls:
- mtd = getattr(self.remote, name)
- batchablefn = getattr(mtd, 'batchable', None)
- if batchablefn is not None:
- batchable = batchablefn(mtd.im_self, *args, **opts)
- encargsorres, encresref = next(batchable)
- if encresref:
- req.append((name, encargsorres,))
- rsp.append((batchable, encresref, resref,))
- else:
- resref.set(encargsorres)
- else:
- if req:
- self._submitreq(req, rsp)
- req, rsp = [], []
- resref.set(mtd(*args, **opts))
- if req:
- self._submitreq(req, rsp)
- def _submitreq(self, req, rsp):
- encresults = self.remote._submitbatch(req)
- for encres, r in zip(encresults, rsp):
- batchable, encresref, resref = r
- encresref.set(encres)
- resref.set(next(batchable))
-
class remoteiterbatcher(peer.iterbatcher):
def __init__(self, remote):
super(remoteiterbatcher, self).__init__()
self._remote = remote
def __getattr__(self, name):
- if not getattr(self._remote, name, False):
- raise AttributeError(
- 'Attempted to iterbatch non-batchable call to %r' % name)
+ # Validate this method is batchable, since submit() only supports
+ # batchable methods.
+ fn = getattr(self._remote, name)
+ if not getattr(fn, 'batchable', None):
+ raise error.ProgrammingError('Attempted to batch a non-batchable '
+ 'call to %r' % name)
+
return super(remoteiterbatcher, self).__getattr__(name)
def submit(self):
@@ -131,23 +102,47 @@
This is mostly valuable over http where request sizes can be
limited, but can be used in other places as well.
"""
- req, rsp = [], []
- for name, args, opts, resref in self.calls:
- mtd = getattr(self._remote, name)
+ # 2-tuple of (command, arguments) that represents what will be
+ # sent over the wire.
+ requests = []
+
+ # 4-tuple of (command, final future, @batchable generator, remote
+ # future).
+ results = []
+
+ for command, args, opts, finalfuture in self.calls:
+ mtd = getattr(self._remote, command)
batchable = mtd.batchable(mtd.im_self, *args, **opts)
- encargsorres, encresref = next(batchable)
- assert encresref
- req.append((name, encargsorres))
- rsp.append((batchable, encresref))
- if req:
- self._resultiter = self._remote._submitbatch(req)
- self._rsp = rsp
+
+ commandargs, fremote = next(batchable)
+ assert fremote
+ requests.append((command, commandargs))
+ results.append((command, finalfuture, batchable, fremote))
+
+ if requests:
+ self._resultiter = self._remote._submitbatch(requests)
+
+ self._results = results
def results(self):
- for (batchable, encresref), encres in itertools.izip(
- self._rsp, self._resultiter):
- encresref.set(encres)
- yield next(batchable)
+ for command, finalfuture, batchable, remotefuture in self._results:
+ # Get the raw result, set it in the remote future, feed it
+ # back into the @batchable generator so it can be decoded, and
+ # set the result on the final future to this value.
+ remoteresult = next(self._resultiter)
+ remotefuture.set(remoteresult)
+ finalfuture.set(next(batchable))
+
+ # Verify our @batchable generators only emit 2 values.
+ try:
+ next(batchable)
+ except StopIteration:
+ pass
+ else:
+ raise error.ProgrammingError('%s @batchable generator emitted '
+ 'unexpected value count' % command)
+
+ yield finalfuture.value
# Forward a couple of names from peer to make wireproto interactions
# slightly more sensible.
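
The rewritten submit()/results() above depends on a convention: each @batchable generator yields exactly twice, first an (encoded arguments, remote future) pair and then the decoded result once the future has been filled with the raw server response. A stripped-down sketch of that calling convention with a plain future class (illustrative only, not Mercurial's peer API):

class future(object):
    """Placeholder that receives a value later."""
    def set(self, value):
        self.value = value

def listkeys_batchable(namespace):
    # First yield: what to send over the wire, plus a future for the raw reply.
    f = future()
    yield {'namespace': namespace}, f
    # Second yield: decode the raw reply that was stored in the future.
    yield dict(item.split('=', 1) for item in f.value.split(';') if item)

gen = listkeys_batchable('bookmarks')
args, fut = next(gen)                      # args would be sent to the server
fut.set('stable=abc123;default=def456')    # raw response arrives
print(next(gen))                           # {'stable': 'abc123', 'default': 'def456'}
# a third next(gen) raises StopIteration, which results() checks for
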
@@ -212,6 +207,7 @@
gboptsmap = {'heads': 'nodes',
'common': 'nodes',
'obsmarkers': 'boolean',
+ 'phases': 'boolean',
'bundlecaps': 'scsv',
'listkeys': 'csv',
'cg': 'boolean',
@@ -219,7 +215,7 @@
# client side
-class wirepeer(peer.peerrepository):
+class wirepeer(repository.legacypeer):
"""Client-side interface for communicating with a peer repository.
Methods commonly call wire protocol commands of the same name.
@@ -227,33 +223,7 @@
See also httppeer.py and sshpeer.py for protocol-specific
implementations of this interface.
"""
- def batch(self):
- if self.capable('batch'):
- return remotebatch(self)
- else:
- return peer.localbatch(self)
- def _submitbatch(self, req):
- """run batch request <req> on the server
-
- Returns an iterator of the raw responses from the server.
- """
- rsp = self._callstream("batch", cmds=encodebatchcmds(req))
- chunk = rsp.read(1024)
- work = [chunk]
- while chunk:
- while ';' not in chunk and chunk:
- chunk = rsp.read(1024)
- work.append(chunk)
- merged = ''.join(work)
- while ';' in merged:
- one, merged = merged.split(';', 1)
- yield unescapearg(one)
- chunk = rsp.read(1024)
- work = [merged, chunk]
- yield unescapearg(''.join(work))
-
- def _submitone(self, op, args):
- return self._call(op, **args)
+ # Begin of basewirepeer interface.
def iterbatch(self):
return remoteiterbatcher(self)
@@ -267,7 +237,8 @@
success, data = d[:-1].split(" ", 1)
if int(success):
yield bin(data)
- self._abort(error.RepoError(data))
+ else:
+ self._abort(error.RepoError(data))
@batchable
def heads(self):
@@ -305,26 +276,17 @@
except TypeError:
self._abort(error.ResponseError(_("unexpected response:"), d))
- def branches(self, nodes):
- n = encodelist(nodes)
- d = self._call("branches", nodes=n)
- try:
- br = [tuple(decodelist(b)) for b in d.splitlines()]
- return br
- except ValueError:
- self._abort(error.ResponseError(_("unexpected response:"), d))
-
- def between(self, pairs):
- batch = 8 # avoid giant requests
- r = []
- for i in xrange(0, len(pairs), batch):
- n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]])
- d = self._call("between", pairs=n)
- try:
- r.extend(l and decodelist(l) or [] for l in d.splitlines())
- except ValueError:
- self._abort(error.ResponseError(_("unexpected response:"), d))
- return r
+ @batchable
+ def listkeys(self, namespace):
+ if not self.capable('pushkey'):
+ yield {}, None
+ f = future()
+ self.ui.debug('preparing listkeys for "%s"\n' % namespace)
+ yield {'namespace': encoding.fromlocal(namespace)}, f
+ d = f.value
+ self.ui.debug('received listkey for "%s": %i bytes\n'
+ % (namespace, len(d)))
+ yield pushkeymod.decodekeys(d)
@batchable
def pushkey(self, namespace, key, old, new):
@@ -347,34 +309,9 @@
self.ui.status(_('remote: '), l)
yield d
- @batchable
- def listkeys(self, namespace):
- if not self.capable('pushkey'):
- yield {}, None
- f = future()
- self.ui.debug('preparing listkeys for "%s"\n' % namespace)
- yield {'namespace': encoding.fromlocal(namespace)}, f
- d = f.value
- self.ui.debug('received listkey for "%s": %i bytes\n'
- % (namespace, len(d)))
- yield pushkeymod.decodekeys(d)
-
def stream_out(self):
return self._callstream('stream_out')
- def changegroup(self, nodes, kind):
- n = encodelist(nodes)
- f = self._callcompressable("changegroup", roots=n)
- return changegroupmod.cg1unpacker(f, 'UN')
-
- def changegroupsubset(self, bases, heads, kind):
- self.requirecap('changegroupsubset', _('look up remote changes'))
- bases = encodelist(bases)
- heads = encodelist(heads)
- f = self._callcompressable("changegroupsubset",
- bases=bases, heads=heads)
- return changegroupmod.cg1unpacker(f, 'UN')
-
def getbundle(self, source, **kwargs):
self.requirecap('getbundle', _('look up remote changes'))
opts = {}
@@ -445,6 +382,69 @@
ret = bundle2.getunbundler(self.ui, stream)
return ret
+ # End of basewirepeer interface.
+
+ # Begin of baselegacywirepeer interface.
+
+ def branches(self, nodes):
+ n = encodelist(nodes)
+ d = self._call("branches", nodes=n)
+ try:
+ br = [tuple(decodelist(b)) for b in d.splitlines()]
+ return br
+ except ValueError:
+ self._abort(error.ResponseError(_("unexpected response:"), d))
+
+ def between(self, pairs):
+ batch = 8 # avoid giant requests
+ r = []
+ for i in xrange(0, len(pairs), batch):
+ n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]])
+ d = self._call("between", pairs=n)
+ try:
+ r.extend(l and decodelist(l) or [] for l in d.splitlines())
+ except ValueError:
+ self._abort(error.ResponseError(_("unexpected response:"), d))
+ return r
+
+ def changegroup(self, nodes, kind):
+ n = encodelist(nodes)
+ f = self._callcompressable("changegroup", roots=n)
+ return changegroupmod.cg1unpacker(f, 'UN')
+
+ def changegroupsubset(self, bases, heads, kind):
+ self.requirecap('changegroupsubset', _('look up remote changes'))
+ bases = encodelist(bases)
+ heads = encodelist(heads)
+ f = self._callcompressable("changegroupsubset",
+ bases=bases, heads=heads)
+ return changegroupmod.cg1unpacker(f, 'UN')
+
+ # End of baselegacywirepeer interface.
+
+ def _submitbatch(self, req):
+ """run batch request <req> on the server
+
+ Returns an iterator of the raw responses from the server.
+ """
+ rsp = self._callstream("batch", cmds=encodebatchcmds(req))
+ chunk = rsp.read(1024)
+ work = [chunk]
+ while chunk:
+ while ';' not in chunk and chunk:
+ chunk = rsp.read(1024)
+ work.append(chunk)
+ merged = ''.join(work)
+ while ';' in merged:
+ one, merged = merged.split(';', 1)
+ yield unescapearg(one)
+ chunk = rsp.read(1024)
+ work = [merged, chunk]
+ yield unescapearg(''.join(work))
+
+ def _submitone(self, op, args):
+ return self._call(op, **args)
+
def debugwireargs(self, one, two, three=None, four=None, five=None):
# don't pass optional arguments left at their default value
opts = {}
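
_submitbatch() above reads the server reply in 1 KB chunks and yields each complete ';'-separated item as soon as it is available, plus the trailing remainder at EOF. The same incremental split over an in-memory text stream (the real method works on the wire response and also unescapes each item with unescapearg):

import io

def splitresponses(rsp, chunksize=1024):
    chunk = rsp.read(chunksize)
    work = [chunk]
    while chunk:
        while ';' not in chunk and chunk:
            chunk = rsp.read(chunksize)
            work.append(chunk)
        merged = ''.join(work)
        while ';' in merged:
            one, merged = merged.split(';', 1)
            yield one
        chunk = rsp.read(chunksize)
        work = [merged, chunk]
    yield ''.join(work)

print(list(splitresponses(io.StringIO('ok 1;ok 2;trailing item'))))
# ['ok 1', 'ok 2', 'trailing item']
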
@@ -796,14 +796,18 @@
@wireprotocommand('changegroup', 'roots')
def changegroup(repo, proto, roots):
nodes = decodelist(roots)
- cg = changegroupmod.changegroup(repo, nodes, 'serve')
+ outgoing = discovery.outgoing(repo, missingroots=nodes,
+ missingheads=repo.heads())
+ cg = changegroupmod.makechangegroup(repo, outgoing, '01', 'serve')
return streamres(reader=cg, v1compressible=True)
@wireprotocommand('changegroupsubset', 'bases heads')
def changegroupsubset(repo, proto, bases, heads):
bases = decodelist(bases)
heads = decodelist(heads)
- cg = changegroupmod.changegroupsubset(repo, bases, heads, 'serve')
+ outgoing = discovery.outgoing(repo, missingroots=bases,
+ missingheads=heads)
+ cg = changegroupmod.makechangegroup(repo, outgoing, '01', 'serve')
return streamres(reader=cg, v1compressible=True)
@wireprotocommand('debugwireargs', 'one two *')
--- a/setup.py Mon Sep 18 10:54:00 2017 -0700
+++ b/setup.py Sat Sep 30 07:52:48 2017 -0700
@@ -760,13 +760,14 @@
'mercurial/cext/mpatch.c'],
include_dirs=common_include_dirs,
depends=common_depends),
- Extension('mercurial.cext.parsers', ['mercurial/cext/dirs.c',
+ Extension('mercurial.cext.parsers', ['mercurial/cext/charencode.c',
+ 'mercurial/cext/dirs.c',
'mercurial/cext/manifest.c',
'mercurial/cext/parsers.c',
'mercurial/cext/pathencode.c',
'mercurial/cext/revlog.c'],
include_dirs=common_include_dirs,
- depends=common_depends),
+ depends=common_depends + ['mercurial/cext/charencode.h']),
Extension('mercurial.cext.osutil', ['mercurial/cext/osutil.c'],
include_dirs=common_include_dirs,
extra_compile_args=osutil_cflags,
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/bruterebase.py Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,74 @@
+# bruterebase.py - brute force rebase testing
+#
+# Copyright 2017 Facebook, Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+from mercurial import (
+ error,
+ registrar,
+ revsetlang,
+)
+
+from hgext import rebase
+
+try:
+ xrange
+except NameError:
+ xrange = range
+
+cmdtable = {}
+command = registrar.command(cmdtable)
+
+@command(b'debugbruterebase')
+def debugbruterebase(ui, repo, source, dest):
+ """for every non-empty subset of source, run rebase -r subset -d dest
+
+ Print one line summary for each subset. Assume obsstore is enabled.
+ """
+ srevs = list(repo.revs(source))
+
+ with repo.wlock(), repo.lock():
+ repolen = len(repo)
+ cl = repo.changelog
+
+ def getdesc(rev):
+ result = cl.changelogrevision(rev).description
+ if rev >= repolen:
+ result += b"'"
+ return result
+
+ for i in xrange(1, 2 ** len(srevs)):
+ subset = [rev for j, rev in enumerate(srevs) if i & (1 << j) != 0]
+ spec = revsetlang.formatspec(b'%ld', subset)
+ tr = repo.transaction(b'rebase')
+ tr.report = lambda x: 0 # hide "transaction abort"
+
+ ui.pushbuffer()
+ try:
+ rebase.rebase(ui, repo, dest=dest, rev=[spec])
+ except error.Abort as ex:
+ summary = b'ABORT: %s' % ex
+ except Exception as ex:
+ summary = b'CRASH: %s' % ex
+ else:
+ # short summary about new nodes
+ cl = repo.changelog
+ descs = []
+ for rev in xrange(repolen, len(repo)):
+ desc = b'%s:' % getdesc(rev)
+ for prev in cl.parentrevs(rev):
+ if prev > -1:
+ desc += getdesc(prev)
+ descs.append(desc)
+ descs.sort()
+ summary = ' '.join(descs)
+ ui.popbuffer()
+ repo.vfs.tryunlink(b'rebasestate')
+
+ subsetdesc = b''.join(getdesc(rev) for rev in subset)
+ ui.write((b'%s: %s\n') % (subsetdesc.rjust(len(srevs)), summary))
+ tr.abort()
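
debugbruterebase above enumerates every non-empty subset of the source revisions by treating the loop counter as a bitmask, where bit j of i selects the j-th revision. The enumeration in isolation (toy revision numbers; the real command then rebases each subset inside a transaction it aborts afterwards):

srevs = [10, 11, 12]                     # pretend revision numbers

for i in range(1, 2 ** len(srevs)):      # 1 .. 2**n - 1 skips the empty subset
    subset = [rev for j, rev in enumerate(srevs) if i & (1 << j) != 0]
    print(i, subset)
# 1 [10]
# 2 [11]
# 3 [10, 11]
# 4 [12]
# 5 [10, 12]
# 6 [11, 12]
# 7 [10, 11, 12]
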
--- a/tests/drawdag.py Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/drawdag.py Sat Sep 30 07:52:48 2017 -0700
@@ -84,6 +84,7 @@
import collections
import itertools
+import re
from mercurial.i18n import _
from mercurial import (
@@ -91,6 +92,7 @@
error,
node,
obsolete,
+ pycompat,
registrar,
scmutil,
tags as tagsmod,
@@ -99,9 +101,9 @@
cmdtable = {}
command = registrar.command(cmdtable)
-_pipechars = '\\/+-|'
-_nonpipechars = ''.join(chr(i) for i in xrange(33, 127)
- if chr(i) not in _pipechars)
+_pipechars = b'\\/+-|'
+_nonpipechars = b''.join(pycompat.bytechr(i) for i in range(33, 127)
+ if pycompat.bytechr(i) not in _pipechars)
def _isname(ch):
"""char -> bool. return True if ch looks like part of a name, False
@@ -109,7 +111,52 @@
return ch in _nonpipechars
def _parseasciigraph(text):
- """str -> {str : [str]}. convert the ASCII graph to edges"""
+ r"""str -> {str : [str]}. convert the ASCII graph to edges
+
+ >>> import pprint
+ >>> pprint.pprint({pycompat.sysstr(k): [pycompat.sysstr(vv) for vv in v]
+ ... for k, v in _parseasciigraph(br'''
+ ... G
+ ... |
+ ... I D C F # split: B -> E, F, G
+ ... \ \| | # replace: C -> D -> H
+ ... H B E # prune: F, I
+ ... \|/
+ ... A
+ ... ''').items()})
+ {'A': [],
+ 'B': ['A'],
+ 'C': ['B'],
+ 'D': ['B'],
+ 'E': ['A'],
+ 'F': ['E'],
+ 'G': ['F'],
+ 'H': ['A'],
+ 'I': ['H']}
+ >>> pprint.pprint({pycompat.sysstr(k): [pycompat.sysstr(vv) for vv in v]
+ ... for k, v in _parseasciigraph(br'''
+ ... o foo
+ ... |\
+ ... +---o bar
+ ... | | |
+ ... | o | baz
+ ... | /
+ ... +---o d
+ ... | |
+ ... +---o c
+ ... | |
+ ... o | b
+ ... |/
+ ... o a
+ ... ''').items()})
+ {'a': [],
+ 'b': ['a'],
+ 'bar': ['b', 'a'],
+ 'baz': [],
+ 'c': ['b'],
+ 'd': ['b'],
+ 'foo': ['baz', 'b']}
+ """
lines = text.splitlines()
edges = collections.defaultdict(list) # {node: []}
@@ -117,16 +164,16 @@
"""(int, int) -> char. give a coordinate, return the char. return a
space for anything out of range"""
if x < 0 or y < 0:
- return ' '
+ return b' '
try:
- return lines[y][x]
+ return lines[y][x:x + 1] or b' '
except IndexError:
- return ' '
+ return b' '
def getname(y, x):
"""(int, int) -> str. like get(y, x) but concatenate left and right
parts. if name is an 'o', try to replace it to the right"""
- result = ''
+ result = b''
for i in itertools.count(0):
ch = get(y, x - i)
if not _isname(ch):
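
The get() change above, lines[y][x:x + 1] instead of lines[y][x], matters under Python 3: indexing a bytes object returns an int, while slicing returns a one-byte bytes object and degrades gracefully past the end of the line. A quick illustration:

line = b'o---+'
print(line[0])                  # 111   (an int on Python 3)
print(line[0:1])                # b'o'  (still bytes, comparable to b'o')
print(line[99:100])             # b''   (out of range: empty, no IndexError)
print(line[99:100] or b' ')     # b' '  fallback, as drawdag's get() does
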
@@ -137,17 +184,17 @@
if not _isname(ch):
break
result += ch
- if result == 'o':
+ if result == b'o':
# special handling, find the name to the right
- result = ''
+ result = b''
for i in itertools.count(2):
ch = get(y, x + i)
- if ch == ' ' or ch in _pipechars:
+ if ch == b' ' or ch in _pipechars:
if result or x + i >= len(lines[y]):
break
else:
result += ch
- return result or 'o'
+ return result or b'o'
return result
def parents(y, x):
@@ -163,19 +210,19 @@
if '-' (or '+') is not in excepted, and get(y, x) is '-' (or '+'),
the next line (y + 1, x) will be checked instead."""
ch = get(y, x)
- if any(ch == c and c not in expected for c in '-+'):
+ if any(ch == c and c not in expected for c in (b'-', b'+')):
y += 1
return follow(y + 1, x, expected)
- if ch in expected or ('o' in expected and _isname(ch)):
+ if ch in expected or (b'o' in expected and _isname(ch)):
visit.append((y, x))
# -o- # starting point:
# /|\ # follow '-' (horizontally), and '/|\' (to the bottom)
- follow(y + 1, x, '|')
- follow(y + 1, x - 1, '/')
- follow(y + 1, x + 1, '\\')
- follow(y, x - 1, '-')
- follow(y, x + 1, '-')
+ follow(y + 1, x, b'|')
+ follow(y + 1, x - 1, b'/')
+ follow(y + 1, x + 1, b'\\')
+ follow(y, x - 1, b'-')
+ follow(y, x + 1, b'-')
while visit:
y, x = visit.pop()
@@ -186,28 +233,28 @@
if _isname(ch):
result.append(getname(y, x))
continue
- elif ch == '|':
- follow(y + 1, x, '/|o')
- follow(y + 1, x - 1, '/')
- follow(y + 1, x + 1, '\\')
- elif ch == '+':
- follow(y, x - 1, '-')
- follow(y, x + 1, '-')
- follow(y + 1, x - 1, '/')
- follow(y + 1, x + 1, '\\')
- follow(y + 1, x, '|')
- elif ch == '\\':
- follow(y + 1, x + 1, '\\|o')
- elif ch == '/':
- follow(y + 1, x - 1, '/|o')
- elif ch == '-':
- follow(y, x - 1, '-+o')
- follow(y, x + 1, '-+o')
+ elif ch == b'|':
+ follow(y + 1, x, b'/|o')
+ follow(y + 1, x - 1, b'/')
+ follow(y + 1, x + 1, b'\\')
+ elif ch == b'+':
+ follow(y, x - 1, b'-')
+ follow(y, x + 1, b'-')
+ follow(y + 1, x - 1, b'/')
+ follow(y + 1, x + 1, b'\\')
+ follow(y + 1, x, b'|')
+ elif ch == b'\\':
+ follow(y + 1, x + 1, b'\\|o')
+ elif ch == b'/':
+ follow(y + 1, x - 1, b'/|o')
+ elif ch == b'-':
+ follow(y, x - 1, b'-+o')
+ follow(y, x + 1, b'-+o')
return result
for y, line in enumerate(lines):
- for x, ch in enumerate(line):
- if ch == '#': # comment
+ for x, ch in enumerate(pycompat.bytestr(line)):
+ if ch == b'#': # comment
break
if _isname(ch):
edges[getname(y, x)] += parents(y, x)
@@ -232,14 +279,14 @@
return None
def flags(self):
- return ''
+ return b''
class simplecommitctx(context.committablectx):
def __init__(self, repo, name, parentctxs, added):
opts = {
'changes': scmutil.status([], list(added), [], [], [], [], []),
- 'date': '0 0',
- 'extra': {'branch': 'default'},
+ 'date': b'0 0',
+ 'extra': {b'branch': b'default'},
}
super(simplecommitctx, self).__init__(self, name, **opts)
self._repo = repo
@@ -258,7 +305,7 @@
"""yield node, parents in topologically order"""
visible = set(edges.keys())
remaining = {} # {str: [str]}
- for k, vs in edges.iteritems():
+ for k, vs in edges.items():
for v in vs:
if v not in remaining:
remaining[v] = []
@@ -271,11 +318,29 @@
if leaf in visible:
yield leaf, edges[leaf]
del remaining[leaf]
- for k, v in remaining.iteritems():
+ for k, v in remaining.items():
if leaf in v:
v.remove(leaf)
-@command('debugdrawdag', [])
+def _getcomments(text):
+ """
+ >>> [pycompat.sysstr(s) for s in _getcomments(br'''
+ ... G
+ ... |
+ ... I D C F # split: B -> E, F, G
+ ... \ \| | # replace: C -> D -> H
+ ... H B E # prune: F, I
+ ... \|/
+ ... A
+ ... ''')]
+ ['split: B -> E, F, G', 'replace: C -> D -> H', 'prune: F, I']
+ """
+ for line in text.splitlines():
+ if b' # ' not in line:
+ continue
+ yield line.split(b' # ', 1)[1].split(b' # ')[0].strip()
+
+@command(b'debugdrawdag', [])
def debugdrawdag(ui, repo, **opts):
"""read an ASCII graph from stdin and create changesets
@@ -296,15 +361,22 @@
# parse the graph and make sure len(parents) <= 2 for each node
edges = _parseasciigraph(text)
- for k, v in edges.iteritems():
+ for k, v in edges.items():
if len(v) > 2:
raise error.Abort(_('%s: too many parents: %s')
- % (k, ' '.join(v)))
+ % (k, b' '.join(v)))
+
+ # parse comments to get extra file content instructions
+ files = collections.defaultdict(dict) # {(name, path): content}
+ comments = list(_getcomments(text))
+ filere = re.compile(br'^(\w+)/([\w/]+)\s*=\s*(.*)$', re.M)
+ for name, path, content in filere.findall(b'\n'.join(comments)):
+ files[name][path] = content.replace(br'\n', b'\n')
committed = {None: node.nullid} # {name: node}
# for leaf nodes, try to find existing nodes in repo
- for name, parents in edges.iteritems():
+ for name, parents in edges.items():
if len(parents) == 0:
try:
committed[name] = scmutil.revsingle(repo, name)
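
The filere pattern added above lets drawdag comments assign file contents with a 'NAME/path = content' syntax, with a literal \n standing for a newline. How the regex behaves on a couple of toy comment lines:

import re

filere = re.compile(br'^(\w+)/([\w/]+)\s*=\s*(.*)$', re.M)
comments = b'\n'.join([
    b'A/dir/file = line1\\nline2',   # becomes file 'dir/file' in commit A
    b'replace: C -> D',              # unrelated comment, no match
])
matches = filere.findall(comments)
print(matches)
# [(b'A', b'dir/file', b'line1\\nline2')]
print(matches[0][2].replace(br'\n', b'\n').splitlines())
# [b'line1', b'line2']
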
@@ -326,38 +398,37 @@
else:
# If it's not a merge, add a single file
added[name] = name
+ # add extra file contents in comments
+ for path, content in files.get(name, {}).items():
+ added[path] = content
ctx = simplecommitctx(repo, name, pctxs, added)
n = ctx.commit()
committed[name] = n
- tagsmod.tag(repo, name, n, message=None, user=None, date=None,
+ tagsmod.tag(repo, [name], n, message=None, user=None, date=None,
local=True)
# handle special comments
- with repo.wlock(), repo.lock(), repo.transaction('drawdag'):
+ with repo.wlock(), repo.lock(), repo.transaction(b'drawdag'):
getctx = lambda x: repo.unfiltered()[committed[x.strip()]]
- for line in text.splitlines():
- if ' # ' not in line:
- continue
-
+ for comment in comments:
rels = [] # obsolete relationships
- comment = line.split(' # ', 1)[1].split(' # ')[0].strip()
- args = comment.split(':', 1)
+ args = comment.split(b':', 1)
if len(args) <= 1:
continue
cmd = args[0].strip()
arg = args[1].strip()
- if cmd in ('replace', 'rebase', 'amend'):
- nodes = [getctx(m) for m in arg.split('->')]
+ if cmd in (b'replace', b'rebase', b'amend'):
+ nodes = [getctx(m) for m in arg.split(b'->')]
for i in range(len(nodes) - 1):
rels.append((nodes[i], (nodes[i + 1],)))
- elif cmd in ('split',):
- pre, succs = arg.split('->')
- succs = succs.split(',')
+ elif cmd in (b'split',):
+ pre, succs = arg.split(b'->')
+ succs = succs.split(b',')
rels.append((getctx(pre), (getctx(s) for s in succs)))
- elif cmd in ('prune',):
- for n in arg.split(','):
+ elif cmd in (b'prune',):
+ for n in arg.split(b','):
rels.append((getctx(n), ()))
if rels:
obsolete.createmarkers(repo, rels, date=(0, 0), operation=cmd)
--- a/tests/f Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/f Sat Sep 30 07:52:48 2017 -0700
@@ -32,13 +32,22 @@
import re
import sys
+# Python 3 adapters
+ispy3 = (sys.version_info[0] >= 3)
+if ispy3:
+ def iterbytes(s):
+ for i in range(len(s)):
+ yield s[i:i + 1]
+else:
+ iterbytes = iter
+
def visit(opts, filenames, outfile):
"""Process filenames in the way specified in opts, writing output to
outfile."""
for f in sorted(filenames):
isstdin = f == '-'
if not isstdin and not os.path.lexists(f):
- outfile.write('%s: file not found\n' % f)
+ outfile.write(b'%s: file not found\n' % f.encode('utf-8'))
continue
quiet = opts.quiet and not opts.recurse or isstdin
isdir = os.path.isdir(f)
@@ -57,7 +66,7 @@
facts.append('link')
content = os.readlink(f)
elif isstdin:
- content = sys.stdin.read()
+ content = getattr(sys.stdin, 'buffer', sys.stdin).read()
if opts.size:
facts.append('size=%s' % len(content))
elif isdir:
@@ -87,19 +96,19 @@
h = hashlib.sha1(content)
facts.append('sha1=%s' % h.hexdigest()[:opts.bytes])
if isstdin:
- outfile.write(', '.join(facts) + '\n')
+ outfile.write(b', '.join(facts) + b'\n')
elif facts:
- outfile.write('%s: %s\n' % (f, ', '.join(facts)))
+ outfile.write(b'%s: %s\n' % (f.encode('utf-8'), b', '.join(facts)))
elif not quiet:
- outfile.write('%s:\n' % f)
+ outfile.write(b'%s:\n' % f.encode('utf-8'))
if content is not None:
chunk = content
if not islink:
if opts.lines:
if opts.lines >= 0:
- chunk = ''.join(chunk.splitlines(True)[:opts.lines])
+ chunk = b''.join(chunk.splitlines(True)[:opts.lines])
else:
- chunk = ''.join(chunk.splitlines(True)[opts.lines:])
+ chunk = b''.join(chunk.splitlines(True)[opts.lines:])
if opts.bytes:
if opts.bytes >= 0:
chunk = chunk[:opts.bytes]
@@ -108,18 +117,19 @@
if opts.hexdump:
for i in range(0, len(chunk), 16):
s = chunk[i:i + 16]
- outfile.write('%04x: %-47s |%s|\n' %
- (i, ' '.join('%02x' % ord(c) for c in s),
- re.sub('[^ -~]', '.', s)))
+ outfile.write(b'%04x: %-47s |%s|\n' %
+ (i, b' '.join(
+ b'%02x' % ord(c) for c in iterbytes(s)),
+ re.sub(b'[^ -~]', b'.', s)))
if opts.dump:
if not quiet:
- outfile.write('>>>\n')
+ outfile.write(b'>>>\n')
outfile.write(chunk)
if not quiet:
- if chunk.endswith('\n'):
- outfile.write('<<<\n')
+ if chunk.endswith(b'\n'):
+ outfile.write(b'<<<\n')
else:
- outfile.write('\n<<< no trailing newline\n')
+ outfile.write(b'\n<<< no trailing newline\n')
if opts.recurse and dirfiles:
assert not isstdin
visit(opts, dirfiles, outfile)
@@ -156,4 +166,4 @@
if not filenames:
filenames = ['-']
- visit(opts, filenames, sys.stdout)
+ visit(opts, filenames, getattr(sys.stdout, 'buffer', sys.stdout))
--- a/tests/failfilemerge.py Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/failfilemerge.py Sat Sep 30 07:52:48 2017 -0700
@@ -9,7 +9,8 @@
)
def failfilemerge(filemergefn,
- premerge, repo, mynode, orig, fcd, fco, fca, labels=None):
+ premerge, repo, wctx, mynode, orig, fcd, fco, fca,
+ labels=None):
raise error.Abort("^C")
return filemergefn(premerge, repo, mynode, orig, fcd, fco, fca, labels)
--- a/tests/hghave.py Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/hghave.py Sat Sep 30 07:52:48 2017 -0700
@@ -652,3 +652,12 @@
@check("fsmonitor", "running tests with fsmonitor")
def has_fsmonitor():
return 'HGFSMONITOR_TESTS' in os.environ
+
+@check("fuzzywuzzy", "Fuzzy string matching library")
+def has_fuzzywuzzy():
+ try:
+ import fuzzywuzzy
+ fuzzywuzzy.__version__
+ return True
+ except ImportError:
+ return False
--- a/tests/md5sum.py Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/md5sum.py Sat Sep 30 07:52:48 2017 -0700
@@ -8,17 +8,11 @@
from __future__ import absolute_import
+import hashlib
import os
import sys
try:
- import hashlib
- md5 = hashlib.md5
-except ImportError:
- import md5
- md5 = md5.md5
-
-try:
import msvcrt
msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
@@ -32,7 +26,7 @@
sys.stderr.write('%s: Can\'t open: %s\n' % (filename, msg))
sys.exit(1)
- m = md5()
+ m = hashlib.md5()
try:
for data in iter(lambda: fp.read(8192), b''):
m.update(data)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/mocktime.py Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,18 @@
+from __future__ import absolute_import
+
+import os
+import time
+
+class mocktime(object):
+ def __init__(self, increment):
+ self.time = 0
+ self.increment = [float(s) for s in increment.split()]
+ self.pos = 0
+
+ def __call__(self):
+ self.time += self.increment[self.pos % len(self.increment)]
+ self.pos += 1
+ return self.time
+
+def uisetup(ui):
+ time.time = mocktime(os.environ.get('MOCKTIME', '0.1'))
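
mocktime.py replaces time.time with a callable that advances by a fixed, cycling list of increments, making timing output in tests deterministic. Its behaviour in isolation (the uisetup hook is how hg loads it as an extension; here the object is called directly):

clock = mocktime('0.25 0.5')   # increments cycle: +0.25, +0.5, +0.25, ...
print(clock())   # 0.25
print(clock())   # 0.75
print(clock())   # 1.0
print(clock())   # 1.5
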
--- a/tests/notcapable Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/notcapable Sat Sep 30 07:52:48 2017 -0700
@@ -6,9 +6,9 @@
fi
cat > notcapable-$CAP.py << EOF
-from mercurial import extensions, peer, localrepo
+from mercurial import extensions, localrepo, repository
def extsetup():
- extensions.wrapfunction(peer.peerrepository, 'capable', wrapcapable)
+ extensions.wrapfunction(repository.peer, 'capable', wrapcapable)
extensions.wrapfunction(localrepo.localrepository, 'peer', wrappeer)
def wrapcapable(orig, self, name, *args, **kwargs):
if name in '$CAP'.split(' '):
--- a/tests/run-tests.py Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/run-tests.py Sat Sep 30 07:52:48 2017 -0700
@@ -94,20 +94,61 @@
try: # is pygments installed
import pygments
import pygments.lexers as lexers
+ import pygments.lexer as lexer
import pygments.formatters as formatters
+ import pygments.token as token
+ import pygments.style as style
pygmentspresent = True
difflexer = lexers.DiffLexer()
terminal256formatter = formatters.Terminal256Formatter()
except ImportError:
pass
+if pygmentspresent:
+ class TestRunnerStyle(style.Style):
+ default_style = ""
+ skipped = token.string_to_tokentype("Token.Generic.Skipped")
+ failed = token.string_to_tokentype("Token.Generic.Failed")
+ skippedname = token.string_to_tokentype("Token.Generic.SName")
+ failedname = token.string_to_tokentype("Token.Generic.FName")
+ styles = {
+ skipped: '#e5e5e5',
+ skippedname: '#00ffff',
+ failed: '#7f0000',
+ failedname: '#ff0000',
+ }
+
+ class TestRunnerLexer(lexer.RegexLexer):
+ tokens = {
+ 'root': [
+ (r'^Skipped', token.Generic.Skipped, 'skipped'),
+ (r'^Failed ', token.Generic.Failed, 'failed'),
+ (r'^ERROR: ', token.Generic.Failed, 'failed'),
+ ],
+ 'skipped': [
+ (r'[\w-]+\.(t|py)', token.Generic.SName),
+ (r':.*', token.Generic.Skipped),
+ ],
+ 'failed': [
+ (r'[\w-]+\.(t|py)', token.Generic.FName),
+ (r'(:| ).*', token.Generic.Failed),
+ ]
+ }
+
+ runnerformatter = formatters.Terminal256Formatter(style=TestRunnerStyle)
+ runnerlexer = TestRunnerLexer()
+
if sys.version_info > (3, 5, 0):
PYTHON3 = True
xrange = range # we use xrange in one place, and we'd rather not use range
def _bytespath(p):
+ if p is None:
+ return p
return p.encode('utf-8')
def _strpath(p):
+ if p is None:
+ return p
return p.decode('utf-8')
elif sys.version_info >= (3, 0, 0):
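
The TestRunnerStyle/TestRunnerLexer pair above exists so that 'Skipped ...' and 'Failed ...' summary lines can be colorized via pygments.highlight(), the call later wrapped by highlightmsg(). A hedged sketch of that call, assuming pygments is installed and the names defined above are in scope:

msg = 'Failed test-example.t: output changed\n'     # hypothetical test name
colored = pygments.highlight(msg, runnerlexer, runnerformatter)
sys.stdout.write(colored)   # same text, now wrapped in ANSI escape sequences
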
@@ -262,6 +303,8 @@
help="skip tests listed in the specified blacklist file")
parser.add_option("--whitelist", action="append",
help="always run tests listed in the specified whitelist file")
+ parser.add_option("--test-list", action="append",
+ help="read tests to run from the specified file")
parser.add_option("--changed", type="string",
help="run tests that are changed in parent rev or working directory")
parser.add_option("-C", "--annotate", action="store_true",
@@ -365,6 +408,10 @@
metavar="known_good_rev",
help=("Automatically bisect any failures using this "
"revision as a known-good revision."))
+ parser.add_option('--bisect-repo', type="string",
+ metavar='bisect_repo',
+ help=("Path of a repo to bisect. Use together with "
+ "--known-good-rev"))
for option, (envvar, default) in defaults.items():
defaults[option] = type(default)(os.environ.get(envvar, default))
@@ -417,6 +464,9 @@
sys.stderr.write('warning: --color=always ignored because '
'pygments is not installed\n')
+ if options.bisect_repo and not options.known_good_rev:
+ parser.error("--bisect-repo cannot be used without --known-good-rev")
+
global useipv6
if options.ipv6:
useipv6 = checksocketfamily('AF_INET6')
@@ -570,6 +620,19 @@
print()
sys.stdout.flush()
+def highlightdiff(line, color):
+ if not color:
+ return line
+ assert pygmentspresent
+ return pygments.highlight(line.decode('latin1'), difflexer,
+ terminal256formatter).encode('latin1')
+
+def highlightmsg(msg, color):
+ if not color:
+ return msg
+ assert pygmentspresent
+ return pygments.highlight(msg, runnerlexer, runnerformatter)
+
def terminate(proc):
"""Terminate subprocess"""
vlog('# Terminating process %d' % proc.pid)
@@ -596,10 +659,10 @@
def __init__(self, path, outputdir, tmpdir, keeptmpdir=False,
debug=False,
- timeout=defaults['timeout'],
- startport=defaults['port'], extraconfigopts=None,
+ timeout=None,
+ startport=None, extraconfigopts=None,
py3kwarnings=False, shell=None, hgcommand=None,
- slowtimeout=defaults['slowtimeout'], usechg=False,
+ slowtimeout=None, usechg=False,
useipv6=False):
"""Create a test from parameters.
@@ -631,6 +694,12 @@
shell is the shell to execute tests in.
"""
+ if timeout is None:
+ timeout = defaults['timeout']
+ if startport is None:
+ startport = defaults['port']
+ if slowtimeout is None:
+ slowtimeout = defaults['slowtimeout']
self.path = path
self.bname = os.path.basename(path)
self.name = _strpath(self.bname)
@@ -1196,7 +1265,7 @@
if ret != 0:
return False, stdout
- if 'slow' in reqs:
+ if b'slow' in reqs:
self._timeout = self._slowtimeout
return True, None
@@ -1359,7 +1428,7 @@
while i < len(els):
el = els[i]
- r = TTest.linematch(el, lout)
+ r = self.linematch(el, lout)
if isinstance(r, str):
if r == '+glob':
lout = el[:-1] + ' (glob)\n'
@@ -1383,11 +1452,10 @@
else:
m = optline.match(el)
if m:
- conditions = [c for c in m.group(2).split(' ')]
-
- if self._hghave(conditions)[0]:
- lout = el
- else:
+ conditions = [
+ c for c in m.group(2).split(b' ')]
+
+ if not self._iftest(conditions):
optional.append(i)
i += 1
@@ -1416,9 +1484,16 @@
while expected.get(pos, None):
el = expected[pos].pop(0)
if el:
- if (not optline.match(el)
- and not el.endswith(b" (?)\n")):
- break
+ if not el.endswith(b" (?)\n"):
+ m = optline.match(el)
+ if m:
+ conditions = [c for c in m.group(2).split(b' ')]
+
+ if self._iftest(conditions):
+ # Don't append as optional line
+ continue
+ else:
+ continue
postout.append(b' ' + el)
if lcmd:
@@ -1481,8 +1556,7 @@
res += re.escape(c)
return TTest.rematch(res, l)
- @staticmethod
- def linematch(el, l):
+ def linematch(self, el, l):
retry = False
if el == l: # perfect match (fast)
return True
@@ -1493,8 +1567,11 @@
else:
m = optline.match(el)
if m:
+ conditions = [c for c in m.group(2).split(b' ')]
+
el = m.group(1) + b"\n"
- retry = "retry"
+ if not self._iftest(conditions):
+ retry = "retry" # Not required by listed features
if el.endswith(b" (esc)\n"):
if PYTHON3:
@@ -1586,7 +1663,8 @@
self.stream.write('t')
else:
if not self._options.nodiff:
- self.stream.write('\nERROR: %s output changed\n' % test)
+ formatted = '\nERROR: %s output changed\n' % test
+ self.stream.write(highlightmsg(formatted, self.color))
self.stream.write('!')
self.stream.flush()
@@ -1652,10 +1730,7 @@
else:
self.stream.write('\n')
for line in lines:
- if self.color:
- line = pygments.highlight(line,
- difflexer,
- terminal256formatter)
+ line = highlightdiff(line, self.color)
if PYTHON3:
self.stream.flush()
self.stream.buffer.write(line)
@@ -1778,7 +1853,7 @@
result.addSkip(test, "Doesn't exist")
continue
- if not (self._whitelist and test.name in self._whitelist):
+ if not (self._whitelist and test.bname in self._whitelist):
if self._blacklist and test.bname in self._blacklist:
result.addSkip(test, 'blacklisted')
continue
@@ -1988,9 +2063,11 @@
if not self._runner.options.noskips:
for test, msg in result.skipped:
- self.stream.writeln('Skipped %s: %s' % (test.name, msg))
+ formatted = 'Skipped %s: %s\n' % (test.name, msg)
+ self.stream.write(highlightmsg(formatted, result.color))
for test, msg in result.failures:
- self.stream.writeln('Failed %s: %s' % (test.name, msg))
+ formatted = 'Failed %s: %s\n' % (test.name, msg)
+ self.stream.write(highlightmsg(formatted, result.color))
for test, msg in result.errors:
self.stream.writeln('Errored %s: %s' % (test.name, msg))
@@ -2008,28 +2085,37 @@
savetimes(self._runner._outputdir, result)
if failed and self._runner.options.known_good_rev:
+ bisectcmd = ['hg', 'bisect']
+ bisectrepo = self._runner.options.bisect_repo
+ if bisectrepo:
+ bisectcmd.extend(['-R', os.path.abspath(bisectrepo)])
def nooutput(args):
p = subprocess.Popen(args, stderr=subprocess.STDOUT,
stdout=subprocess.PIPE)
p.stdout.read()
p.wait()
for test, msg in result.failures:
- nooutput(['hg', 'bisect', '--reset']),
- nooutput(['hg', 'bisect', '--bad', '.'])
- nooutput(['hg', 'bisect', '--good',
+ nooutput(bisectcmd + ['--reset']),
+ nooutput(bisectcmd + ['--bad', '.'])
+ nooutput(bisectcmd + ['--good',
self._runner.options.known_good_rev])
- # TODO: we probably need to forward some options
+ # TODO: we probably need to forward more options
# that alter hg's behavior inside the tests.
- rtc = '%s %s %s' % (sys.executable, sys.argv[0], test)
- sub = subprocess.Popen(['hg', 'bisect', '--command', rtc],
+ opts = ''
+ withhg = self._runner.options.with_hg
+ if withhg:
+ opts += ' --with-hg=%s ' % shellquote(_strpath(withhg))
+ rtc = '%s %s %s %s' % (sys.executable, sys.argv[0], opts,
+ test)
+ sub = subprocess.Popen(bisectcmd + ['--command', rtc],
stderr=subprocess.STDOUT,
stdout=subprocess.PIPE)
data = sub.stdout.read()
sub.wait()
m = re.search(
- (r'\nThe first (?P<goodbad>bad|good) revision '
- r'is:\nchangeset: +\d+:(?P<node>[a-f0-9]+)\n.*\n'
- r'summary: +(?P<summary>[^\n]+)\n'),
+ (br'\nThe first (?P<goodbad>bad|good) revision '
+ br'is:\nchangeset: +\d+:(?P<node>[a-f0-9]+)\n.*\n'
+ br'summary: +(?P<summary>[^\n]+)\n'),
data, (re.MULTILINE | re.DOTALL))
if m is None:
self.stream.writeln(
@@ -2108,7 +2194,8 @@
# the skip message as a text node instead.
t = doc.createElement('testcase')
t.setAttribute('name', tc.name)
- message = cdatasafe(message).decode('utf-8', 'replace')
+ binmessage = message.encode('utf-8')
+ message = cdatasafe(binmessage).decode('utf-8', 'replace')
cd = doc.createCDATASection(message)
skipelem = doc.createElement('skipped')
skipelem.appendChild(cd)
@@ -2201,6 +2288,10 @@
# positional arguments are paths to test files to run, so
# we make sure they're all bytestrings
args = [_bytespath(a) for a in args]
+ if options.test_list is not None:
+ for listfile in options.test_list:
+ with open(listfile, 'rb') as f:
+ args.extend(t for t in f.read().splitlines() if t)
self.options = options
self._checktools()
--- a/tests/test-acl.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-acl.t Sat Sep 30 07:52:48 2017 -0700
@@ -92,13 +92,13 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 4 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -154,13 +154,13 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 4 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -219,13 +219,13 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 4 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -294,13 +294,13 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 4 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -358,13 +358,13 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 4 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -427,13 +427,13 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 4 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -493,13 +493,13 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 4 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -564,13 +564,13 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 4 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -632,13 +632,13 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 4 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -702,13 +702,13 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 4 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -784,13 +784,13 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 4 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -861,13 +861,13 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 4 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -933,13 +933,13 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 4 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -1016,13 +1016,13 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 4 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -1101,13 +1101,13 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 4 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -1182,13 +1182,13 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 4 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -1257,13 +1257,13 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 4 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -1339,13 +1339,13 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 4 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -1454,14 +1454,14 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -1542,14 +1542,14 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -1616,14 +1616,14 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -1686,14 +1686,14 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -1750,14 +1750,14 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -1843,14 +1843,14 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -1935,14 +1935,14 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -2004,14 +2004,14 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
@@ -2090,14 +2090,14 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 155 bytes payload
+ bundle2-output-part: "replycaps" 168 bytes payload
bundle2-output-part: "check:heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 155
+ bundle2-input-part: total payload size 168
bundle2-input-part: "check:heads" supported
bundle2-input-part: total payload size 20
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
--- a/tests/test-add.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-add.t Sat Sep 30 07:52:48 2017 -0700
@@ -44,14 +44,14 @@
abort: ui.portablefilenames value is invalid ('jump')
[255]
$ hg --config ui.portablefilenames=abort add con.xml
- abort: filename contains 'con', which is reserved on Windows: 'con.xml'
+ abort: filename contains 'con', which is reserved on Windows: con.xml
[255]
$ hg st
A a
A b
? con.xml
$ hg add con.xml
- warning: filename contains 'con', which is reserved on Windows: 'con.xml'
+ warning: filename contains 'con', which is reserved on Windows: con.xml
$ hg st
A a
A b
--- a/tests/test-amend.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-amend.t Sat Sep 30 07:52:48 2017 -0700
@@ -11,7 +11,7 @@
#if obsstore-on
$ cat << EOF >> $HGRCPATH
> [experimental]
- > evolution=createmarkers
+ > stabilization=createmarkers
> EOF
#endif
@@ -28,9 +28,9 @@
$ hg update B -q
$ echo 2 >> B
+ $ hg amend
+ saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/112478962961-7e959a55-amend.hg (glob) (obsstore-off !)
#if obsstore-off
- $ hg amend
- saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/112478962961-af2c0941-amend.hg (glob)
$ hg log -p -G --hidden -T '{rev} {node|short} {desc}\n'
@ 1 be169c7e8dbe B
| diff --git a/B b/B
@@ -50,9 +50,8 @@
\ No newline at end of file
#else
- $ hg amend
$ hg log -p -G --hidden -T '{rev} {node|short} {desc}\n'
- @ 3 be169c7e8dbe B
+ @ 2 be169c7e8dbe B
| diff --git a/B b/B
| new file mode 100644
| --- /dev/null
@@ -60,15 +59,6 @@
| @@ -0,0 +1,1 @@
| +B2
|
- | x 2 edf08988b141 temporary amend commit for 112478962961
- | | diff --git a/B b/B
- | | --- a/B
- | | +++ b/B
- | | @@ -1,1 +1,1 @@
- | | -B
- | | \ No newline at end of file
- | | +B2
- | |
| x 1 112478962961 B
|/ diff --git a/B b/B
| new file mode 100644
@@ -95,17 +85,27 @@
nothing changed
[1]
+ $ hg amend -d "0 0"
+ nothing changed
+ [1]
+
+ $ hg amend -d "Thu Jan 01 00:00:00 1970 UTC"
+ nothing changed
+ [1]
+
Matcher and metadata options
$ echo 3 > C
$ echo 4 > D
$ hg add C D
- $ hg amend -m NEWMESSAGE -I C -q
+ $ hg amend -m NEWMESSAGE -I C
+ saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/be169c7e8dbe-7684ddc5-amend.hg (glob) (obsstore-off !)
$ hg log -r . -T '{node|short} {desc} {files}\n'
c7ba14d9075b NEWMESSAGE B C
$ echo 5 > E
$ rm C
- $ hg amend -d '2000 1000' -u 'Foo <foo@example.com>' -A C D -q
+ $ hg amend -d '2000 1000' -u 'Foo <foo@example.com>' -A C D
+ saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/c7ba14d9075b-b3e76daa-amend.hg (glob) (obsstore-off !)
$ hg log -r . -T '{node|short} {desc} {files} {author} {date}\n'
14f6c4bcc865 NEWMESSAGE B D Foo <foo@example.com> 2000.01000
@@ -118,10 +118,12 @@
> EOF
$ chmod +x $TESTTMP/prefix.sh
- $ HGEDITOR="sh $TESTTMP/prefix.sh" hg amend --edit -q
+ $ HGEDITOR="sh $TESTTMP/prefix.sh" hg amend --edit
+ saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/14f6c4bcc865-6591f15d-amend.hg (glob) (obsstore-off !)
$ hg log -r . -T '{node|short} {desc}\n'
298f085230c3 EDITED: NEWMESSAGE
- $ HGEDITOR="sh $TESTTMP/prefix.sh" hg amend -e -m MSG -q
+ $ HGEDITOR="sh $TESTTMP/prefix.sh" hg amend -e -m MSG
+ saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/298f085230c3-d81a6ad3-amend.hg (glob) (obsstore-off !)
$ hg log -r . -T '{node|short} {desc}\n'
974f07f28537 EDITED: MSG
@@ -129,7 +131,8 @@
$ hg amend -l $TESTTMP/msg -m BAR
abort: options --message and --logfile are mutually exclusive
[255]
- $ hg amend -l $TESTTMP/msg -q
+ $ hg amend -l $TESTTMP/msg
+ saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/974f07f28537-edb6470a-amend.hg (glob) (obsstore-off !)
$ hg log -r . -T '{node|short} {desc}\n'
507be9bdac71 FOO
@@ -137,7 +140,7 @@
$ touch F G
$ hg add F G
- $ cat <<EOS | hg amend -i --config ui.interactive=1 -q
+ $ cat <<EOS | hg amend -i --config ui.interactive=1
> y
> n
> EOS
@@ -149,6 +152,7 @@
new file mode 100644
examine changes to 'G'? [Ynesfdaq?] n
+ saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/507be9bdac71-c8077452-amend.hg (glob) (obsstore-off !)
$ hg log -r . -T '{files}\n'
B D F
@@ -176,12 +180,12 @@
$ cat >> $HGRCPATH <<EOF
> [experimental]
- > evolution=createmarkers, allowunstable
+ > stabilization=createmarkers, allowunstable
> EOF
$ hg amend
$ hg log -T '{rev} {node|short} {desc}\n' -G
- @ 4 be169c7e8dbe B
+ @ 3 be169c7e8dbe B
|
| o 2 26805aba1e60 C
| |
@@ -195,7 +199,7 @@
$ hg phase -r A --public
$ hg update -C -q A
- $ hg amend -m AMEND -q
+ $ hg amend -m AMEND
abort: cannot amend public changesets
[255]
@@ -209,7 +213,8 @@
> A B
> EOS
$ hg update -q C
- $ hg amend -m FOO -q
+ $ hg amend -m FOO
+ saved backup bundle to $TESTTMP/repo3/.hg/strip-backup/a35c07e8a2a4-15ff4612-amend.hg (glob) (obsstore-off !)
$ rm .hg/localtags
$ hg log -G -T '{desc}\n'
@ FOO
--- a/tests/test-annotate.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-annotate.t Sat Sep 30 07:52:48 2017 -0700
@@ -400,7 +400,8 @@
and its ancestor by overriding "repo._filecommit".
$ cat > ../legacyrepo.py <<EOF
- > from mercurial import node, error
+ > from __future__ import absolute_import
+ > from mercurial import error, node
> def reposetup(ui, repo):
> class legacyrepo(repo.__class__):
> def _filecommit(self, fctx, manifest1, manifest2,
--- a/tests/test-archive.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-archive.t Sat Sep 30 07:52:48 2017 -0700
@@ -211,15 +211,12 @@
> done
$ cat > md5comp.py <<EOF
- > from __future__ import print_function
- > try:
- > from hashlib import md5
- > except ImportError:
- > from md5 import md5
+ > from __future__ import absolute_import, print_function
+ > import hashlib
> import sys
> f1, f2 = sys.argv[1:3]
- > h1 = md5(open(f1, 'rb').read()).hexdigest()
- > h2 = md5(open(f2, 'rb').read()).hexdigest()
+ > h1 = hashlib.md5(open(f1, 'rb').read()).hexdigest()
+ > h2 = hashlib.md5(open(f2, 'rb').read()).hexdigest()
> print(h1 == h2 or "md5 differ: " + repr((h1, h2)))
> EOF
@@ -357,8 +354,9 @@
$ hg -R repo add repo/a
$ hg -R repo commit -m '#0' -d '456789012 21600'
$ cat > show_mtime.py <<EOF
- > from __future__ import print_function
- > import sys, os
+ > from __future__ import absolute_import, print_function
+ > import os
+ > import sys
> print(int(os.stat(sys.argv[1]).st_mtime))
> EOF
--- a/tests/test-batching.py Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-batching.py Sat Sep 30 07:52:48 2017 -0700
@@ -8,7 +8,9 @@
from __future__ import absolute_import, print_function
from mercurial import (
+ error,
peer,
+ util,
wireproto,
)
@@ -27,9 +29,9 @@
return "%s und %s" % (b, a,)
def greet(self, name=None):
return "Hello, %s" % name
- def batch(self):
+ def batchiter(self):
'''Support for local batching.'''
- return peer.localbatch(self)
+ return peer.localiterbatcher(self)
# usage of "thing" interface
def use(it):
@@ -41,29 +43,54 @@
print(it.foo("Un", two="Deux"))
print(it.bar("Eins", "Zwei"))
- # Batched call to a couple of (possibly proxied) methods.
- batch = it.batch()
+ # Batched call to a couple of proxied methods.
+ batch = it.batchiter()
# The calls return futures to eventually hold results.
foo = batch.foo(one="One", two="Two")
- foo2 = batch.foo(None)
bar = batch.bar("Eins", "Zwei")
- # We can call non-batchable proxy methods, but the break the current batch
- # request and cause additional roundtrips.
- greet = batch.greet(name="John Smith")
- # We can also add local methods into the mix, but they break the batch too.
- hello = batch.hello()
bar2 = batch.bar(b="Uno", a="Due")
- # Only now are all the calls executed in sequence, with as few roundtrips
- # as possible.
+
+ # Future shouldn't be set until we submit().
+ assert isinstance(foo, peer.future)
+ assert not util.safehasattr(foo, 'value')
+ assert not util.safehasattr(bar, 'value')
batch.submit()
- # After the call to submit, the futures actually contain values.
+ # Call results() to obtain results as a generator.
+ results = batch.results()
+
+ # Future results shouldn't be set until we consume a value.
+ assert not util.safehasattr(foo, 'value')
+ foovalue = next(results)
+ assert util.safehasattr(foo, 'value')
+ assert foovalue == foo.value
print(foo.value)
- print(foo2.value)
+ next(results)
print(bar.value)
- print(greet.value)
- print(hello.value)
+ next(results)
print(bar2.value)
+ # We should be at the end of the results generator.
+ try:
+ next(results)
+ except StopIteration:
+ print('proper end of results generator')
+ else:
+ print('extra emitted element!')
+
+ # Attempting to call a non-batchable method inside a batch fails.
+ batch = it.batchiter()
+ try:
+ batch.greet(name='John Smith')
+ except error.ProgrammingError as e:
+ print(e)
+
+ # Attempting to call a local method inside a batch fails.
+ batch = it.batchiter()
+ try:
+ batch.hello()
+ except error.ProgrammingError as e:
+ print(e)
+
# local usage
mylocal = localthing()
print()
@@ -146,15 +173,14 @@
req.append(name + ':' + args)
req = ';'.join(req)
res = self._submitone('batch', [('cmds', req,)])
- return res.split(';')
+ for r in res.split(';'):
+ yield r
- def batch(self):
- return wireproto.remotebatch(self)
+ def batchiter(self):
+ return wireproto.remoteiterbatcher(self)
@peer.batchable
def foo(self, one, two=None):
- if not one:
- yield "Nope", None
encargs = [('one', mangle(one),), ('two', mangle(two),)]
encresref = peer.future()
yield encargs, encresref
--- a/tests/test-batching.py.out Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-batching.py.out Sat Sep 30 07:52:48 2017 -0700
@@ -4,11 +4,9 @@
Un and Deux
Eins und Zwei
One and Two
-Nope
Eins und Zwei
-Hello, John Smith
-Ready.
Uno und Due
+proper end of results generator
== Remote
Ready.
@@ -18,15 +16,11 @@
REQ: bar?b=Fjot&a=[xfj
-> Fjot!voe![xfj
Eins und Zwei
-REQ: batch?cmds=foo:one=Pof,two=Uxp;bar:b=Fjot,a=[xfj
- -> Pof!boe!Uxp;Fjot!voe![xfj
-REQ: greet?name=Kpio!Tnjui
- -> Ifmmp-!Kpio!Tnjui
-REQ: batch?cmds=bar:b=Vop,a=Evf
- -> Vop!voe!Evf
+REQ: batch?cmds=foo:one=Pof,two=Uxp;bar:b=Fjot,a=[xfj;bar:b=Vop,a=Evf
+ -> Pof!boe!Uxp;Fjot!voe![xfj;Vop!voe!Evf
One and Two
-Nope
Eins und Zwei
-Hello, John Smith
-Ready.
Uno und Due
+proper end of results generator
+Attempted to batch a non-batchable call to 'greet'
+Attempted to batch a non-batchable call to 'hello'
--- a/tests/test-bisect.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-bisect.t Sat Sep 30 07:52:48 2017 -0700
@@ -184,6 +184,14 @@
$ hg bisect -r
$ hg bisect -b
+ $ hg status -v
+ # The repository is in an unfinished *bisect* state.
+
+ # To mark the changeset good: hg bisect --good
+ # To mark the changeset bad: hg bisect --bad
+ # To abort: hg bisect --reset
+
+ $ hg status -v --config commands.status.skipstates=bisect
$ hg summary
parent: 31:58c80a7c8a40 tip
msg 31
@@ -454,9 +462,10 @@
$ cat > script.py <<EOF
> #!$PYTHON
+ > from __future__ import absolute_import
> import sys
- > from mercurial import ui, hg
- > repo = hg.repository(ui.ui.load(), '.')
+ > from mercurial import hg, ui as uimod
+ > repo = hg.repository(uimod.ui.load(), '.')
> if repo['.'].rev() < 6:
> sys.exit(1)
> EOF
@@ -565,7 +574,7 @@
$ cat >> $HGRCPATH << EOF
> [experimental]
- > evolution=createmarkers
+ > stabilization=createmarkers
> EOF
tip is obsolete
--- a/tests/test-blackbox.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-blackbox.t Sat Sep 30 07:52:48 2017 -0700
@@ -6,6 +6,7 @@
> mq=
> [alias]
> confuse = log --limit 3
+ > so-confusing = confuse --style compact
> EOF
$ hg init blackboxtest
$ cd blackboxtest
@@ -15,22 +16,30 @@
$ echo a > a
$ hg add a
$ hg blackbox --config blackbox.dirty=True
+ 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> init blackboxtest exited 0 after * seconds (glob)
1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> add a
1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> add a exited 0 after * seconds (glob)
1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000+ (5000)> blackbox
alias expansion is logged
+ $ rm ./.hg/blackbox.log
$ hg confuse
$ hg blackbox
- 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> add a
- 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> add a exited 0 after * seconds (glob)
- 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000+ (5000)> blackbox
- 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000+ (5000)> blackbox --config *blackbox.dirty=True* exited 0 after * seconds (glob)
1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> confuse
1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> alias 'confuse' expands to 'log --limit 3'
1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> confuse exited 0 after * seconds (glob)
1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> blackbox
+recursive aliases work correctly
+ $ rm ./.hg/blackbox.log
+ $ hg so-confusing
+ $ hg blackbox
+ 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> so-confusing
+ 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> alias 'so-confusing' expands to 'confuse --style compact'
+ 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> alias 'confuse' expands to 'log --limit 3'
+ 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> so-confusing exited 0 after * seconds (glob)
+ 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> blackbox
+
incoming change tracking
create two heads to verify that we only see one change in the log later
@@ -147,11 +156,12 @@
> eol=!
> EOF
$ hg blackbox -l 6
- 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> update
+ 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> update (no-chg !)
1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> writing .hg/cache/tags2-visible with 0 tags
1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> pythonhook-preupdate: hgext.eol.preupdate finished in * seconds (glob)
1970/01/01 00:00:00 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> exthook-update: echo hooked finished in * seconds (glob)
1970/01/01 00:00:00 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> update exited 0 after * seconds (glob)
+ 1970/01/01 00:00:00 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> serve --cmdserver chgunix --address $TESTTMP.chgsock/server.* --daemon-postexec 'chdir:/' (glob) (chg !)
1970/01/01 00:00:00 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> blackbox -l 6
log rotation
@@ -173,6 +183,7 @@
$ hg init blackboxtest3
$ cd blackboxtest3
$ hg blackbox
+ 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> init blackboxtest3 exited 0 after * seconds (glob)
1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> blackbox
$ mv .hg/blackbox.log .hg/blackbox.log-
$ mkdir .hg/blackbox.log
@@ -229,3 +240,102 @@
cleanup
$ cd ..
+
+#if chg
+
+when using chg, blackbox.log should get rotated correctly
+
+ $ cat > $TESTTMP/noop.py << EOF
+ > from __future__ import absolute_import
+ > import time
+ > from mercurial import registrar, scmutil
+ > cmdtable = {}
+ > command = registrar.command(cmdtable)
+ > @command('noop')
+ > def noop(ui, repo):
+ > pass
+ > EOF
+
+ $ hg init blackbox-chg
+ $ cd blackbox-chg
+
+ $ cat > .hg/hgrc << EOF
+ > [blackbox]
+ > maxsize = 500B
+ > [extensions]
+ > # extension change forces chg to restart
+ > noop=$TESTTMP/noop.py
+ > EOF
+
+ $ $PYTHON -c 'print("a" * 400)' > .hg/blackbox.log
+ $ chg noop
+ $ chg noop
+ $ chg noop
+ $ chg noop
+ $ chg noop
+
+ $ cat > showsize.py << 'EOF'
+ > import os, sys
+ > limit = 500
+ > for p in sys.argv[1:]:
+ > size = os.stat(p).st_size
+ > if size >= limit:
+ > desc = '>='
+ > else:
+ > desc = '<'
+ > print('%s: %s %d' % (p, desc, limit))
+ > EOF
+
+ $ $PYTHON showsize.py .hg/blackbox*
+ .hg/blackbox.log: < 500
+ .hg/blackbox.log.1: >= 500
+ .hg/blackbox.log.2: >= 500
+
+ $ cd ..
+
+With chg, blackbox should not create the log file if the repo is gone
+
+ $ hg init repo1
+ $ hg --config extensions.a=! -R repo1 log
+ $ rm -rf $TESTTMP/repo1
+ $ hg --config extensions.a=! init repo1
+
+#endif
+
+blackbox should work if repo.ui.log is not called (issue5518)
+
+ $ cat > $TESTTMP/raise.py << EOF
+ > from __future__ import absolute_import
+ > from mercurial import registrar, scmutil
+ > cmdtable = {}
+ > command = registrar.command(cmdtable)
+ > @command('raise')
+ > def raisecmd(*args):
+ > raise RuntimeError('raise')
+ > EOF
+
+ $ cat >> $HGRCPATH << EOF
+ > [blackbox]
+ > track = commandexception
+ > [extensions]
+ > raise=$TESTTMP/raise.py
+ > EOF
+
+ $ hg init $TESTTMP/blackbox-exception-only
+ $ cd $TESTTMP/blackbox-exception-only
+
+#if chg
+ (chg exits 255 because it fails to receive an exit code)
+ $ hg raise 2>/dev/null
+ [255]
+#else
+ (hg exits 1 because Python default exit code for uncaught exception is 1)
+ $ hg raise 2>/dev/null
+ [1]
+#endif
+
+ $ head -1 .hg/blackbox.log
+ 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> ** Unknown exception encountered with possibly-broken third-party extension mock
+ $ tail -2 .hg/blackbox.log
+ RuntimeError: raise
+
--- a/tests/test-bookmarks-pushpull.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-bookmarks-pushpull.t Sat Sep 30 07:52:48 2017 -0700
@@ -6,7 +6,7 @@
> [phases]
> publish=False
> [experimental]
- > evolution=createmarkers,exchange
+ > stabilization=createmarkers,exchange
> EOF
initialize
--- a/tests/test-bookmarks-rebase.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-bookmarks-rebase.t Sat Sep 30 07:52:48 2017 -0700
@@ -37,7 +37,7 @@
rebase
$ hg rebase -s two -d one
- rebasing 3:2ae46b1d99a7 "3" (tip two)
+ rebasing 3:2ae46b1d99a7 "3" (two tip)
saved backup bundle to $TESTTMP/.hg/strip-backup/2ae46b1d99a7-e6b057bc-rebase.hg (glob)
$ hg log
@@ -77,7 +77,7 @@
created new head
$ hg bookmark three
$ hg rebase -s three -d two
- rebasing 4:dd7c838e8362 "4" (tip three)
+ rebasing 4:dd7c838e8362 "4" (three tip)
merging d
warning: conflicts while merging d! (edit, then use 'hg resolve --mark')
unresolved conflicts (see hg resolve, then hg rebase --continue)
@@ -92,7 +92,7 @@
after aborted rebase, restoring a bookmark that has been removed should not fail
$ hg rebase -s three -d two
- rebasing 4:dd7c838e8362 "4" (tip three)
+ rebasing 4:dd7c838e8362 "4" (three tip)
merging d
warning: conflicts while merging d! (edit, then use 'hg resolve --mark')
unresolved conflicts (see hg resolve, then hg rebase --continue)
--- a/tests/test-bookmarks.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-bookmarks.t Sat Sep 30 07:52:48 2017 -0700
@@ -191,6 +191,48 @@
$ hg bookmark -f -m X Y
+rename bookmark using .
+
+ $ hg book rename-me
+ $ hg book -m . renamed
+ $ hg bookmark
+ X2 1:925d80f479bb
+ Y 2:db815d6d32e6
+ Z 0:f7b1eb17ad24
+ * renamed 2:db815d6d32e6
+ $ hg up -q Y
+ $ hg book -d renamed
+
+rename bookmark using . with no active bookmark
+
+ $ hg book rename-me
+ $ hg book -i rename-me
+ $ hg book -m . renamed
+ abort: no active bookmark
+ [255]
+ $ hg up -q Y
+ $ hg book -d rename-me
+
+delete bookmark using .
+
+ $ hg book delete-me
+ $ hg book -d .
+ $ hg bookmark
+ X2 1:925d80f479bb
+ Y 2:db815d6d32e6
+ Z 0:f7b1eb17ad24
+ $ hg up -q Y
+
+delete bookmark using . with no active bookmark
+
+ $ hg book delete-me
+ $ hg book -i delete-me
+ $ hg book -d .
+ abort: no active bookmark
+ [255]
+ $ hg up -q Y
+ $ hg book -d delete-me
+
list bookmarks
$ hg bookmark
@@ -906,8 +948,10 @@
$ echo a > a
$ cat > $TESTTMP/pausefinalize.py <<EOF
+ > from __future__ import absolute_import
+ > import os
+ > import time
> from mercurial import extensions, localrepo
- > import os, time
> def transaction(orig, self, desc, report=None):
> tr = orig(self, desc, report)
> def sleep(*args, **kwargs):
--- a/tests/test-branches.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-branches.t Sat Sep 30 07:52:48 2017 -0700
@@ -418,6 +418,131 @@
date: Thu Jan 01 00:00:09 1970 +0000
summary: prune bad branch
+
+reclose branch
+
+ $ hg up -C c
+ 3 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ hg commit -d '9 0' --close-branch -m 'reclosing this branch'
+ $ hg branches
+ b 13:e23b5505d1ad
+ a branch name much longer than the default justification used by branches 7:10ff5895aa57
+ a 5:d8cbc61dbaa6 (inactive)
+ default 0:19709c5a4e75 (inactive)
+ $ hg branches --closed
+ b 13:e23b5505d1ad
+ a branch name much longer than the default justification used by branches 7:10ff5895aa57
+ c 14:f894c25619d3 (closed)
+ a 5:d8cbc61dbaa6 (inactive)
+ default 0:19709c5a4e75 (inactive)
+
+multihead branch
+
+ $ hg up -C default
+ 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
+ $ hg branch m
+ marked working directory as branch m
+ $ touch m
+ $ hg add m
+ $ hg commit -d '10 0' -m 'multihead base'
+ $ echo "m1" >m
+ $ hg commit -d '10 0' -m 'head 1'
+ $ hg up -C '.^'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo "m2" >m
+ $ hg commit -d '10 0' -m 'head 2'
+ created new head
+ $ hg log -b m
+ changeset: 17:df343b0df04f
+ branch: m
+ tag: tip
+ parent: 15:f3447637f53e
+ user: test
+ date: Thu Jan 01 00:00:10 1970 +0000
+ summary: head 2
+
+ changeset: 16:a58ca5d3bdf3
+ branch: m
+ user: test
+ date: Thu Jan 01 00:00:10 1970 +0000
+ summary: head 1
+
+ changeset: 15:f3447637f53e
+ branch: m
+ parent: 0:19709c5a4e75
+ user: test
+ date: Thu Jan 01 00:00:10 1970 +0000
+ summary: multihead base
+
+ $ hg heads --topo m
+ changeset: 17:df343b0df04f
+ branch: m
+ tag: tip
+ parent: 15:f3447637f53e
+ user: test
+ date: Thu Jan 01 00:00:10 1970 +0000
+ summary: head 2
+
+ changeset: 16:a58ca5d3bdf3
+ branch: m
+ user: test
+ date: Thu Jan 01 00:00:10 1970 +0000
+ summary: head 1
+
+ $ hg branches
+ m 17:df343b0df04f
+ b 13:e23b5505d1ad
+ a branch name much longer than the default justification used by branches 7:10ff5895aa57
+ a 5:d8cbc61dbaa6 (inactive)
+ default 0:19709c5a4e75 (inactive)
+
+partially merge multihead branch
+
+ $ hg up -C default
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg branch md
+ marked working directory as branch md
+ $ hg merge m
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg commit -d '11 0' -m 'merge head 2'
+ $ hg heads --topo m
+ changeset: 16:a58ca5d3bdf3
+ branch: m
+ user: test
+ date: Thu Jan 01 00:00:10 1970 +0000
+ summary: head 1
+
+ $ hg branches
+ md 18:c914c99f1fbb
+ m 17:df343b0df04f
+ b 13:e23b5505d1ad
+ a branch name much longer than the default justification used by branches 7:10ff5895aa57
+ a 5:d8cbc61dbaa6 (inactive)
+ default 0:19709c5a4e75 (inactive)
+
+partially close multihead branch
+
+ $ hg up -C a58ca5d3bdf3
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg commit -d '12 0' -m 'close head 1' --close-branch
+ $ hg heads --topo m
+ changeset: 19:cd21a80baa3d
+ branch: m
+ tag: tip
+ parent: 16:a58ca5d3bdf3
+ user: test
+ date: Thu Jan 01 00:00:12 1970 +0000
+ summary: close head 1
+
+ $ hg branches
+ md 18:c914c99f1fbb
+ b 13:e23b5505d1ad
+ a branch name much longer than the default justification used by branches 7:10ff5895aa57
+ m 17:df343b0df04f (inactive)
+ a 5:d8cbc61dbaa6 (inactive)
+ default 0:19709c5a4e75 (inactive)
+
default branch colors:
$ cat <<EOF >> $HGRCPATH
@@ -427,22 +552,23 @@
> mode = ansi
> EOF
- $ hg up -C c
- 3 files updated, 0 files merged, 2 files removed, 0 files unresolved
- $ hg commit -d '9 0' --close-branch -m 'reclosing this branch'
$ hg up -C b
- 2 files updated, 0 files merged, 3 files removed, 0 files unresolved
+ 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg branches --color=always
+ \x1b[0;0mmd\x1b[0m\x1b[0;33m 18:c914c99f1fbb\x1b[0m (esc)
\x1b[0;32mb\x1b[0m\x1b[0;33m 13:e23b5505d1ad\x1b[0m (esc)
\x1b[0;0ma branch name much longer than the default justification used by branches\x1b[0m\x1b[0;33m 7:10ff5895aa57\x1b[0m (esc)
+ \x1b[0;0mm\x1b[0m\x1b[0;33m 17:df343b0df04f\x1b[0m (inactive) (esc)
\x1b[0;0ma\x1b[0m\x1b[0;33m 5:d8cbc61dbaa6\x1b[0m (inactive) (esc)
\x1b[0;0mdefault\x1b[0m\x1b[0;33m 0:19709c5a4e75\x1b[0m (inactive) (esc)
default closed branch color:
$ hg branches --color=always --closed
+ \x1b[0;0mmd\x1b[0m\x1b[0;33m 18:c914c99f1fbb\x1b[0m (esc)
\x1b[0;32mb\x1b[0m\x1b[0;33m 13:e23b5505d1ad\x1b[0m (esc)
\x1b[0;0ma branch name much longer than the default justification used by branches\x1b[0m\x1b[0;33m 7:10ff5895aa57\x1b[0m (esc)
+ \x1b[0;0mm\x1b[0m\x1b[0;33m 17:df343b0df04f\x1b[0m (inactive) (esc)
\x1b[0;30;1mc\x1b[0m\x1b[0;33m 14:f894c25619d3\x1b[0m (closed) (esc)
\x1b[0;0ma\x1b[0m\x1b[0;33m 5:d8cbc61dbaa6\x1b[0m (inactive) (esc)
\x1b[0;0mdefault\x1b[0m\x1b[0;33m 0:19709c5a4e75\x1b[0m (inactive) (esc)
@@ -461,16 +587,20 @@
custom branch colors:
$ hg branches --color=always
+ \x1b[0;32mmd\x1b[0m\x1b[0;36m 18:c914c99f1fbb\x1b[0m (esc)
\x1b[0;31mb\x1b[0m\x1b[0;36m 13:e23b5505d1ad\x1b[0m (esc)
\x1b[0;32ma branch name much longer than the default justification used by branches\x1b[0m\x1b[0;36m 7:10ff5895aa57\x1b[0m (esc)
+ \x1b[0;35mm\x1b[0m\x1b[0;36m 17:df343b0df04f\x1b[0m (inactive) (esc)
\x1b[0;35ma\x1b[0m\x1b[0;36m 5:d8cbc61dbaa6\x1b[0m (inactive) (esc)
\x1b[0;35mdefault\x1b[0m\x1b[0;36m 0:19709c5a4e75\x1b[0m (inactive) (esc)
custom closed branch color:
$ hg branches --color=always --closed
+ \x1b[0;32mmd\x1b[0m\x1b[0;36m 18:c914c99f1fbb\x1b[0m (esc)
\x1b[0;31mb\x1b[0m\x1b[0;36m 13:e23b5505d1ad\x1b[0m (esc)
\x1b[0;32ma branch name much longer than the default justification used by branches\x1b[0m\x1b[0;36m 7:10ff5895aa57\x1b[0m (esc)
+ \x1b[0;35mm\x1b[0m\x1b[0;36m 17:df343b0df04f\x1b[0m (inactive) (esc)
\x1b[0;34mc\x1b[0m\x1b[0;36m 14:f894c25619d3\x1b[0m (closed) (esc)
\x1b[0;35ma\x1b[0m\x1b[0;36m 5:d8cbc61dbaa6\x1b[0m (inactive) (esc)
\x1b[0;35mdefault\x1b[0m\x1b[0;36m 0:19709c5a4e75\x1b[0m (inactive) (esc)
@@ -481,6 +611,14 @@
[
{
"active": true,
+ "branch": "md",
+ "closed": false,
+ "current": false,
+ "node": "c914c99f1fbb2b1d785a0a939ed3f67275df18e9",
+ "rev": 18
+ },
+ {
+ "active": true,
"branch": "b",
"closed": false,
"current": true,
@@ -497,6 +635,14 @@
},
{
"active": false,
+ "branch": "m",
+ "closed": false,
+ "current": false,
+ "node": "df343b0df04feb2a946cd4b6e9520e552fef14ee",
+ "rev": 17
+ },
+ {
+ "active": false,
"branch": "c",
"closed": true,
"current": false,
@@ -525,8 +671,10 @@
c
$ hg branches -T '{word(0, branch)}: {desc|firstline}\n'
+ md: merge head 2
b: reopen branch with a change
a: Adding d branch
+ m: head 2
a: Adding b branch head 2
default: Adding root node
@@ -538,8 +686,10 @@
> EOF
$ hg branches -T "$TESTTMP/map-myjson"
{
+ {"branch": "md", "node": "c914c99f1fbb"},
{"branch": "b", "node": "e23b5505d1ad"},
{"branch": "a branch *", "node": "10ff5895aa57"}, (glob)
+ {"branch": "m", "node": "df343b0df04f"},
{"branch": "a", "node": "d8cbc61dbaa6"},
{"branch": "default", "node": "19709c5a4e75"}
}
@@ -553,8 +703,10 @@
> EOF
$ hg branches -T myjson
{
+ {"branch": "md", "node": "c914c99f1fbb"},
{"branch": "b", "node": "e23b5505d1ad"},
{"branch": "a branch *", "node": "10ff5895aa57"}, (glob)
+ {"branch": "m", "node": "df343b0df04f"},
{"branch": "a", "node": "d8cbc61dbaa6"},
{"branch": "default", "node": "19709c5a4e75"}
}
@@ -564,8 +716,10 @@
> :docheader = 'should not be selected as a docheader for literal templates\n'
> EOF
$ hg branches -T '{branch}\n'
+ md
b
a branch name much longer than the default justification used by branches
+ m
a
default
@@ -579,14 +733,14 @@
$ rm -rf .hg/cache; hg head a -T '{rev}\n'
5
$ f --hexdump --size .hg/cache/rbc-*
- .hg/cache/rbc-names-v1: size=87
+ .hg/cache/rbc-names-v1: size=92
0000: 64 65 66 61 75 6c 74 00 61 00 62 00 63 00 61 20 |default.a.b.c.a |
0010: 62 72 61 6e 63 68 20 6e 61 6d 65 20 6d 75 63 68 |branch name much|
0020: 20 6c 6f 6e 67 65 72 20 74 68 61 6e 20 74 68 65 | longer than the|
0030: 20 64 65 66 61 75 6c 74 20 6a 75 73 74 69 66 69 | default justifi|
0040: 63 61 74 69 6f 6e 20 75 73 65 64 20 62 79 20 62 |cation used by b|
- 0050: 72 61 6e 63 68 65 73 |ranches|
- .hg/cache/rbc-revs-v1: size=120
+ 0050: 72 61 6e 63 68 65 73 00 6d 00 6d 64 |ranches.m.md|
+ .hg/cache/rbc-revs-v1: size=160
0000: 19 70 9c 5a 00 00 00 00 dd 6b 44 0d 00 00 00 01 |.p.Z.....kD.....|
0010: 88 1f e2 b9 00 00 00 01 ac 22 03 33 00 00 00 02 |.........".3....|
0020: ae e3 9c d1 00 00 00 02 d8 cb c6 1d 00 00 00 01 |................|
@@ -594,7 +748,9 @@
0040: ee bb 94 44 00 00 00 02 5f 40 61 bb 00 00 00 02 |...D...._@a.....|
0050: bf be 84 1b 00 00 00 02 d3 f1 63 45 80 00 00 02 |..........cE....|
0060: e3 d4 9c 05 80 00 00 02 e2 3b 55 05 00 00 00 02 |.........;U.....|
- 0070: f8 94 c2 56 80 00 00 03 |...V....|
+ 0070: f8 94 c2 56 80 00 00 03 f3 44 76 37 00 00 00 05 |...V.....Dv7....|
+ 0080: a5 8c a5 d3 00 00 00 05 df 34 3b 0d 00 00 00 05 |.........4;.....|
+ 0090: c9 14 c9 9f 00 00 00 06 cd 21 a8 0b 80 00 00 05 |.........!......|
no errors when revbranchcache is not writable
@@ -622,9 +778,9 @@
$ echo >> .hg/cache/rbc-revs-v1
$ rm -f .hg/cache/branch* && hg head a -T '{rev}\n' --debug
5
- truncating cache/rbc-revs-v1 to 120
+ truncating cache/rbc-revs-v1 to 160
$ f --size .hg/cache/rbc-revs*
- .hg/cache/rbc-revs-v1: size=120
+ .hg/cache/rbc-revs-v1: size=160
recovery from invalid cache file with partial last record
$ mv .hg/cache/rbc-revs-v1 .
$ f -qDB 119 rbc-revs-v1 > .hg/cache/rbc-revs-v1
@@ -634,14 +790,14 @@
5
truncating cache/rbc-revs-v1 to 112
$ f --size .hg/cache/rbc-revs*
- .hg/cache/rbc-revs-v1: size=120
+ .hg/cache/rbc-revs-v1: size=160
recovery from invalid cache file with missing record - no truncation
$ mv .hg/cache/rbc-revs-v1 .
$ f -qDB 112 rbc-revs-v1 > .hg/cache/rbc-revs-v1
$ rm -f .hg/cache/branch* && hg head a -T '{rev}\n' --debug
5
$ f --size .hg/cache/rbc-revs*
- .hg/cache/rbc-revs-v1: size=120
+ .hg/cache/rbc-revs-v1: size=160
recovery from invalid cache file with some bad records
$ mv .hg/cache/rbc-revs-v1 .
$ f -qDB 8 rbc-revs-v1 > .hg/cache/rbc-revs-v1
@@ -658,29 +814,29 @@
5
truncating cache/rbc-revs-v1 to 104
$ f --size --hexdump --bytes=16 .hg/cache/rbc-revs*
- .hg/cache/rbc-revs-v1: size=120
+ .hg/cache/rbc-revs-v1: size=160
0000: 19 70 9c 5a 00 00 00 00 dd 6b 44 0d 00 00 00 01 |.p.Z.....kD.....|
cache is updated when committing
$ hg branch i-will-regret-this
marked working directory as branch i-will-regret-this
$ hg ci -m regrets
$ f --size .hg/cache/rbc-*
- .hg/cache/rbc-names-v1: size=106
- .hg/cache/rbc-revs-v1: size=128
+ .hg/cache/rbc-names-v1: size=111
+ .hg/cache/rbc-revs-v1: size=168
update after rollback - the cache will be correct but rbc-names will will still
contain the branch name even though it no longer is used
$ hg up -qr '.^'
$ hg rollback -qf
$ f --size --hexdump .hg/cache/rbc-*
- .hg/cache/rbc-names-v1: size=106
+ .hg/cache/rbc-names-v1: size=111
0000: 64 65 66 61 75 6c 74 00 61 00 62 00 63 00 61 20 |default.a.b.c.a |
0010: 62 72 61 6e 63 68 20 6e 61 6d 65 20 6d 75 63 68 |branch name much|
0020: 20 6c 6f 6e 67 65 72 20 74 68 61 6e 20 74 68 65 | longer than the|
0030: 20 64 65 66 61 75 6c 74 20 6a 75 73 74 69 66 69 | default justifi|
0040: 63 61 74 69 6f 6e 20 75 73 65 64 20 62 79 20 62 |cation used by b|
- 0050: 72 61 6e 63 68 65 73 00 69 2d 77 69 6c 6c 2d 72 |ranches.i-will-r|
- 0060: 65 67 72 65 74 2d 74 68 69 73 |egret-this|
- .hg/cache/rbc-revs-v1: size=120
+ 0050: 72 61 6e 63 68 65 73 00 6d 00 6d 64 00 69 2d 77 |ranches.m.md.i-w|
+ 0060: 69 6c 6c 2d 72 65 67 72 65 74 2d 74 68 69 73 |ill-regret-this|
+ .hg/cache/rbc-revs-v1: size=160
0000: 19 70 9c 5a 00 00 00 00 dd 6b 44 0d 00 00 00 01 |.p.Z.....kD.....|
0010: 88 1f e2 b9 00 00 00 01 ac 22 03 33 00 00 00 02 |.........".3....|
0020: ae e3 9c d1 00 00 00 02 d8 cb c6 1d 00 00 00 01 |................|
@@ -688,12 +844,14 @@
0040: ee bb 94 44 00 00 00 02 5f 40 61 bb 00 00 00 02 |...D...._@a.....|
0050: bf be 84 1b 00 00 00 02 d3 f1 63 45 80 00 00 02 |..........cE....|
0060: e3 d4 9c 05 80 00 00 02 e2 3b 55 05 00 00 00 02 |.........;U.....|
- 0070: f8 94 c2 56 80 00 00 03 |...V....|
+ 0070: f8 94 c2 56 80 00 00 03 f3 44 76 37 00 00 00 05 |...V.....Dv7....|
+ 0080: a5 8c a5 d3 00 00 00 05 df 34 3b 0d 00 00 00 05 |.........4;.....|
+ 0090: c9 14 c9 9f 00 00 00 06 cd 21 a8 0b 80 00 00 05 |.........!......|
cache is updated/truncated when stripping - it is thus very hard to get in a
situation where the cache is out of sync and the hash check detects it
$ hg --config extensions.strip= strip -r tip --nob
$ f --size .hg/cache/rbc-revs*
- .hg/cache/rbc-revs-v1: size=112
+ .hg/cache/rbc-revs-v1: size=152
cache is rebuilt when corruption is detected
$ echo > .hg/cache/rbc-names-v1
@@ -701,13 +859,14 @@
referenced branch names not found - rebuilding revision branch cache from scratch
8 9 10 11 12 13 truncating cache/rbc-revs-v1 to 40
$ f --size --hexdump .hg/cache/rbc-*
- .hg/cache/rbc-names-v1: size=79
+ .hg/cache/rbc-names-v1: size=84
0000: 62 00 61 00 63 00 61 20 62 72 61 6e 63 68 20 6e |b.a.c.a branch n|
0010: 61 6d 65 20 6d 75 63 68 20 6c 6f 6e 67 65 72 20 |ame much longer |
0020: 74 68 61 6e 20 74 68 65 20 64 65 66 61 75 6c 74 |than the default|
0030: 20 6a 75 73 74 69 66 69 63 61 74 69 6f 6e 20 75 | justification u|
- 0040: 73 65 64 20 62 79 20 62 72 61 6e 63 68 65 73 |sed by branches|
- .hg/cache/rbc-revs-v1: size=112
+ 0040: 73 65 64 20 62 79 20 62 72 61 6e 63 68 65 73 00 |sed by branches.|
+ 0050: 6d 00 6d 64 |m.md|
+ .hg/cache/rbc-revs-v1: size=152
0000: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
0010: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
0020: 00 00 00 00 00 00 00 00 d8 cb c6 1d 00 00 00 01 |................|
@@ -715,6 +874,9 @@
0040: ee bb 94 44 00 00 00 00 5f 40 61 bb 00 00 00 00 |...D...._@a.....|
0050: bf be 84 1b 00 00 00 00 d3 f1 63 45 80 00 00 00 |..........cE....|
0060: e3 d4 9c 05 80 00 00 00 e2 3b 55 05 00 00 00 00 |.........;U.....|
+ 0070: f8 94 c2 56 80 00 00 02 f3 44 76 37 00 00 00 04 |...V.....Dv7....|
+ 0080: a5 8c a5 d3 00 00 00 04 df 34 3b 0d 00 00 00 04 |.........4;.....|
+ 0090: c9 14 c9 9f 00 00 00 05 |........|
Test that cache files are created and grows correctly:
@@ -724,7 +886,7 @@
$ f --size --hexdump .hg/cache/rbc-*
.hg/cache/rbc-names-v1: size=1
0000: 61 |a|
- .hg/cache/rbc-revs-v1: size=112
+ .hg/cache/rbc-revs-v1: size=152
0000: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
0010: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
0020: 00 00 00 00 00 00 00 00 d8 cb c6 1d 00 00 00 00 |................|
@@ -732,6 +894,9 @@
0040: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
0050: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
0060: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
+ 0070: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
+ 0080: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
+ 0090: 00 00 00 00 00 00 00 00 |........|
$ cd ..
--- a/tests/test-bundle-phases.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-bundle-phases.t Sat Sep 30 07:52:48 2017 -0700
@@ -37,12 +37,12 @@
$ hg bundle --base B -r E bundle
3 changesets found
$ hg debugbundle bundle
- Stream params: sortdict([('Compression', 'BZ')])
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '3'), ('targetphase', '2')])"
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 3, targetphase: 2, version: 02}
26805aba1e600a82e93661149f2313866a221a7b
f585351a92f85104bff7c284233c338b10eb1df7
9bc730a19041f9ec7cb33c626e811aa233efb18c
- phase-heads -- 'sortdict()'
+ phase-heads -- {}
26805aba1e600a82e93661149f2313866a221a7b draft
$ hg strip --no-backup C
$ hg unbundle -q bundle
@@ -226,14 +226,14 @@
$ hg bundle -a bundle
5 changesets found
$ hg debugbundle bundle
- Stream params: sortdict([('Compression', 'BZ')])
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '5'), ('targetphase', '2')])"
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 5, targetphase: 2, version: 02}
426bada5c67598ca65036d57d9e4b64b0c1ce7a0
112478962961147124edd43549aedd1a335e44bf
dc0947a82db884575bb76ea10ac97b08536bfa03
4e4f9194f9f181c57f62e823e8bdfa46ab9e4ff4
03ca77807e919db8807c3749086dc36fb478cac0
- phase-heads -- 'sortdict()'
+ phase-heads -- {}
dc0947a82db884575bb76ea10ac97b08536bfa03 public
03ca77807e919db8807c3749086dc36fb478cac0 draft
$ hg strip --no-backup A
@@ -254,32 +254,32 @@
$ hg bundle --base 'A + C' -r D bundle
2 changesets found
$ hg debugbundle bundle
- Stream params: sortdict([('Compression', 'BZ')])
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '2'), ('targetphase', '2')])"
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 2, targetphase: 2, version: 02}
112478962961147124edd43549aedd1a335e44bf
4e4f9194f9f181c57f62e823e8bdfa46ab9e4ff4
- phase-heads -- 'sortdict()'
+ phase-heads -- {}
$ rm bundle
$ hg bundle --base A -r D bundle
3 changesets found
$ hg debugbundle bundle
- Stream params: sortdict([('Compression', 'BZ')])
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '3'), ('targetphase', '2')])"
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 3, targetphase: 2, version: 02}
112478962961147124edd43549aedd1a335e44bf
dc0947a82db884575bb76ea10ac97b08536bfa03
4e4f9194f9f181c57f62e823e8bdfa46ab9e4ff4
- phase-heads -- 'sortdict()'
+ phase-heads -- {}
dc0947a82db884575bb76ea10ac97b08536bfa03 public
$ rm bundle
$ hg bundle --base 'B + C' -r 'D + E' bundle
2 changesets found
$ hg debugbundle bundle
- Stream params: sortdict([('Compression', 'BZ')])
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '2'), ('targetphase', '2')])"
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 2, targetphase: 2, version: 02}
4e4f9194f9f181c57f62e823e8bdfa46ab9e4ff4
03ca77807e919db8807c3749086dc36fb478cac0
- phase-heads -- 'sortdict()'
+ phase-heads -- {}
03ca77807e919db8807c3749086dc36fb478cac0 draft
$ rm bundle
--- a/tests/test-bundle-type.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-bundle-type.t Sat Sep 30 07:52:48 2017 -0700
@@ -73,7 +73,7 @@
1 changesets found
HG20\x00\x00 (esc)
Stream params: {}
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
+ changegroup -- {nbchanges: 1, version: 02}
c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
none-v2
@@ -81,8 +81,8 @@
searching for changes
1 changesets found
HG20\x00\x00 (esc)
- Stream params: sortdict([('Compression', 'BZ')])
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 1, version: 02}
c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
bzip2-v2
@@ -90,8 +90,8 @@
searching for changes
1 changesets found
HG20\x00\x00 (esc)
- Stream params: sortdict([('Compression', 'GZ')])
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
+ Stream params: {Compression: GZ}
+ changegroup -- {nbchanges: 1, version: 02}
c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
gzip-v2
@@ -100,7 +100,7 @@
1 changesets found
HG20\x00\x00 (esc)
Stream params: {}
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
+ changegroup -- {nbchanges: 1, version: 02}
c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
none-v2
@@ -108,8 +108,8 @@
searching for changes
1 changesets found
HG20\x00\x00 (esc)
- Stream params: sortdict([('Compression', 'BZ')])
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 1, version: 02}
c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
bzip2-v2
@@ -167,8 +167,8 @@
searching for changes
1 changesets found
HG20\x00\x00 (esc)
- Stream params: sortdict([('Compression', 'ZS')])
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
+ Stream params: {Compression: ZS}
+ changegroup -- {nbchanges: 1, version: 02}
c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
zstd-v2
@@ -176,8 +176,8 @@
searching for changes
1 changesets found
HG20\x00\x00 (esc)
- Stream params: sortdict([('Compression', 'ZS')])
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
+ Stream params: {Compression: ZS}
+ changegroup -- {nbchanges: 1, version: 02}
c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
zstd-v2
--- a/tests/test-bundle2-exchange.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-bundle2-exchange.t Sat Sep 30 07:52:48 2017 -0700
@@ -15,7 +15,7 @@
$ cat >> $HGRCPATH << EOF
> [experimental]
- > evolution=createmarkers,exchange
+ > stabilization=createmarkers,exchange
> bundle2-output-capture=True
> [ui]
> ssh="$PYTHON" "$TESTDIR/dummyssh"
--- a/tests/test-bundle2-format.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-bundle2-format.t Sat Sep 30 07:52:48 2017 -0700
@@ -12,8 +12,9 @@
> This extension allows detailed testing of the various bundle2 API and
> behaviors.
> """
- >
- > import sys, os, gc
+ > import gc
+ > import os
+ > import sys
> from mercurial import util
> from mercurial import bundle2
> from mercurial import scmutil
@@ -21,6 +22,7 @@
> from mercurial import changegroup
> from mercurial import error
> from mercurial import obsolete
+ > from mercurial import pycompat
> from mercurial import registrar
>
>
@@ -35,74 +37,74 @@
> cmdtable = {}
> command = registrar.command(cmdtable)
>
- > ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
+ > ELEPHANTSSONG = b"""Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
> Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
> Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
> assert len(ELEPHANTSSONG) == 178 # future test say 178 bytes, trust it.
>
- > @bundle2.parthandler('test:song')
+ > @bundle2.parthandler(b'test:song')
> def songhandler(op, part):
> """handle a "test:song" bundle2 part, printing the lyrics on stdin"""
- > op.ui.write('The choir starts singing:\n')
+ > op.ui.write(b'The choir starts singing:\n')
> verses = 0
- > for line in part.read().split('\n'):
- > op.ui.write(' %s\n' % line)
+ > for line in part.read().split(b'\n'):
+ > op.ui.write(b' %s\n' % line)
> verses += 1
- > op.records.add('song', {'verses': verses})
+ > op.records.add(b'song', {b'verses': verses})
>
- > @bundle2.parthandler('test:ping')
+ > @bundle2.parthandler(b'test:ping')
> def pinghandler(op, part):
- > op.ui.write('received ping request (id %i)\n' % part.id)
- > if op.reply is not None and 'ping-pong' in op.reply.capabilities:
- > op.ui.write_err('replying to ping request (id %i)\n' % part.id)
- > op.reply.newpart('test:pong', [('in-reply-to', str(part.id))],
+ > op.ui.write(b'received ping request (id %i)\n' % part.id)
+ > if op.reply is not None and b'ping-pong' in op.reply.capabilities:
+ > op.ui.write_err(b'replying to ping request (id %i)\n' % part.id)
+ > op.reply.newpart(b'test:pong', [(b'in-reply-to', b'%d' % part.id)],
> mandatory=False)
>
- > @bundle2.parthandler('test:debugreply')
+ > @bundle2.parthandler(b'test:debugreply')
> def debugreply(op, part):
> """print data about the capacity of the bundle reply"""
> if op.reply is None:
- > op.ui.write('debugreply: no reply\n')
+ > op.ui.write(b'debugreply: no reply\n')
> else:
- > op.ui.write('debugreply: capabilities:\n')
+ > op.ui.write(b'debugreply: capabilities:\n')
> for cap in sorted(op.reply.capabilities):
- > op.ui.write('debugreply: %r\n' % cap)
+ > op.ui.write(b"debugreply: '%s'\n" % cap)
> for val in op.reply.capabilities[cap]:
- > op.ui.write('debugreply: %r\n' % val)
+ > op.ui.write(b"debugreply: '%s'\n" % val)
>
> @command(b'bundle2',
- > [('', 'param', [], 'stream level parameter'),
- > ('', 'unknown', False, 'include an unknown mandatory part in the bundle'),
- > ('', 'unknownparams', False, 'include an unknown part parameters in the bundle'),
- > ('', 'parts', False, 'include some arbitrary parts to the bundle'),
- > ('', 'reply', False, 'produce a reply bundle'),
- > ('', 'pushrace', False, 'includes a check:head part with unknown nodes'),
- > ('', 'genraise', False, 'includes a part that raise an exception during generation'),
- > ('', 'timeout', False, 'emulate a timeout during bundle generation'),
- > ('r', 'rev', [], 'includes those changeset in the bundle'),
- > ('', 'compress', '', 'compress the stream'),],
- > '[OUTPUTFILE]')
+ > [(b'', b'param', [], b'stream level parameter'),
+ > (b'', b'unknown', False, b'include an unknown mandatory part in the bundle'),
+ > (b'', b'unknownparams', False, b'include an unknown part parameters in the bundle'),
+ > (b'', b'parts', False, b'include some arbitrary parts to the bundle'),
+ > (b'', b'reply', False, b'produce a reply bundle'),
+ > (b'', b'pushrace', False, b'includes a check:head part with unknown nodes'),
+ > (b'', b'genraise', False, b'includes a part that raise an exception during generation'),
+ > (b'', b'timeout', False, b'emulate a timeout during bundle generation'),
+ > (b'r', b'rev', [], b'includes those changeset in the bundle'),
+ > (b'', b'compress', b'', b'compress the stream'),],
+ > b'[OUTPUTFILE]')
> def cmdbundle2(ui, repo, path=None, **opts):
> """write a bundle2 container on standard output"""
> bundler = bundle2.bundle20(ui)
> for p in opts['param']:
- > p = p.split('=', 1)
+ > p = p.split(b'=', 1)
> try:
> bundler.addparam(*p)
- > except ValueError, exc:
+ > except ValueError as exc:
> raise error.Abort('%s' % exc)
>
> if opts['compress']:
> bundler.setcompression(opts['compress'])
>
> if opts['reply']:
- > capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
- > bundler.newpart('replycaps', data=capsstring)
+ > capsstring = b'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
+ > bundler.newpart(b'replycaps', data=capsstring)
>
> if opts['pushrace']:
> # also serve to test the assignement of data outside of init
- > part = bundler.newpart('check:heads')
- > part.data = '01234567890123456789'
+ > part = bundler.newpart(b'check:heads')
+ > part.data = b'01234567890123456789'
>
> revs = opts['rev']
> if 'rev' in opts:
@@ -113,45 +115,46 @@
> headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
> headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
> outgoing = discovery.outgoing(repo, headcommon, headmissing)
- > cg = changegroup.getchangegroup(repo, 'test:bundle2', outgoing, None)
- > bundler.newpart('changegroup', data=cg.getchunks(),
+ > cg = changegroup.makechangegroup(repo, outgoing, b'01',
+ > b'test:bundle2')
+ > bundler.newpart(b'changegroup', data=cg.getchunks(),
> mandatory=False)
>
> if opts['parts']:
- > bundler.newpart('test:empty', mandatory=False)
+ > bundler.newpart(b'test:empty', mandatory=False)
> # add a second one to make sure we handle multiple parts
- > bundler.newpart('test:empty', mandatory=False)
- > bundler.newpart('test:song', data=ELEPHANTSSONG, mandatory=False)
- > bundler.newpart('test:debugreply', mandatory=False)
- > mathpart = bundler.newpart('test:math')
- > mathpart.addparam('pi', '3.14')
- > mathpart.addparam('e', '2.72')
- > mathpart.addparam('cooking', 'raw', mandatory=False)
- > mathpart.data = '42'
+ > bundler.newpart(b'test:empty', mandatory=False)
+ > bundler.newpart(b'test:song', data=ELEPHANTSSONG, mandatory=False)
+ > bundler.newpart(b'test:debugreply', mandatory=False)
+ > mathpart = bundler.newpart(b'test:math')
+ > mathpart.addparam(b'pi', b'3.14')
+ > mathpart.addparam(b'e', b'2.72')
+ > mathpart.addparam(b'cooking', b'raw', mandatory=False)
+ > mathpart.data = b'42'
> mathpart.mandatory = False
> # advisory known part with unknown mandatory param
- > bundler.newpart('test:song', [('randomparam','')], mandatory=False)
+ > bundler.newpart(b'test:song', [(b'randomparam', b'')], mandatory=False)
> if opts['unknown']:
- > bundler.newpart('test:unknown', data='some random content')
+ > bundler.newpart(b'test:unknown', data=b'some random content')
> if opts['unknownparams']:
- > bundler.newpart('test:song', [('randomparams', '')])
+ > bundler.newpart(b'test:song', [(b'randomparams', b'')])
> if opts['parts']:
- > bundler.newpart('test:ping', mandatory=False)
+ > bundler.newpart(b'test:ping', mandatory=False)
> if opts['genraise']:
> def genraise():
- > yield 'first line\n'
+ > yield b'first line\n'
> raise RuntimeError('Someone set up us the bomb!')
- > bundler.newpart('output', data=genraise(), mandatory=False)
+ > bundler.newpart(b'output', data=genraise(), mandatory=False)
>
> if path is None:
- > file = sys.stdout
+ > file = pycompat.stdout
> else:
> file = open(path, 'wb')
>
> if opts['timeout']:
- > bundler.newpart('test:song', data=ELEPHANTSSONG, mandatory=False)
+ > bundler.newpart(b'test:song', data=ELEPHANTSSONG, mandatory=False)
> for idx, junk in enumerate(bundler.getchunks()):
- > ui.write('%d chunk\n' % idx)
+ > ui.write(b'%d chunk\n' % idx)
> if idx > 4:
> # This throws a GeneratorExit inside the generator, which
> # can cause problems if the exception-recovery code is
@@ -159,75 +162,75 @@
> # occur while we're in the middle of a part.
> break
> gc.collect()
- > ui.write('fake timeout complete.\n')
+ > ui.write(b'fake timeout complete.\n')
> return
> try:
> for chunk in bundler.getchunks():
> file.write(chunk)
- > except RuntimeError, exc:
+ > except RuntimeError as exc:
> raise error.Abort(exc)
> finally:
> file.flush()
>
- > @command(b'unbundle2', [], '')
+ > @command(b'unbundle2', [], b'')
> def cmdunbundle2(ui, repo, replypath=None):
> """process a bundle2 stream from stdin on the current repo"""
> try:
> tr = None
> lock = repo.lock()
- > tr = repo.transaction('processbundle')
+ > tr = repo.transaction(b'processbundle')
> try:
- > unbundler = bundle2.getunbundler(ui, sys.stdin)
+ > unbundler = bundle2.getunbundler(ui, pycompat.stdin)
> op = bundle2.processbundle(repo, unbundler, lambda: tr)
> tr.close()
- > except error.BundleValueError, exc:
+ > except error.BundleValueError as exc:
> raise error.Abort('missing support for %s' % exc)
- > except error.PushRaced, exc:
+ > except error.PushRaced as exc:
> raise error.Abort('push race: %s' % exc)
> finally:
> if tr is not None:
> tr.release()
> lock.release()
- > remains = sys.stdin.read()
- > ui.write('%i unread bytes\n' % len(remains))
- > if op.records['song']:
- > totalverses = sum(r['verses'] for r in op.records['song'])
- > ui.write('%i total verses sung\n' % totalverses)
- > for rec in op.records['changegroup']:
- > ui.write('addchangegroup return: %i\n' % rec['return'])
+ > remains = pycompat.stdin.read()
+ > ui.write(b'%i unread bytes\n' % len(remains))
+ > if op.records[b'song']:
+ > totalverses = sum(r[b'verses'] for r in op.records[b'song'])
+ > ui.write(b'%i total verses sung\n' % totalverses)
+ > for rec in op.records[b'changegroup']:
+ > ui.write(b'addchangegroup return: %i\n' % rec[b'return'])
> if op.reply is not None and replypath is not None:
> with open(replypath, 'wb') as file:
> for chunk in op.reply.getchunks():
> file.write(chunk)
>
- > @command(b'statbundle2', [], '')
+ > @command(b'statbundle2', [], b'')
> def cmdstatbundle2(ui, repo):
> """print statistic on the bundle2 container read from stdin"""
- > unbundler = bundle2.getunbundler(ui, sys.stdin)
+ > unbundler = bundle2.getunbundler(ui, pycompat.stdin)
> try:
> params = unbundler.params
- > except error.BundleValueError, exc:
- > raise error.Abort('unknown parameters: %s' % exc)
- > ui.write('options count: %i\n' % len(params))
+ > except error.BundleValueError as exc:
+ > raise error.Abort(b'unknown parameters: %s' % exc)
+ > ui.write(b'options count: %i\n' % len(params))
> for key in sorted(params):
- > ui.write('- %s\n' % key)
+ > ui.write(b'- %s\n' % key)
> value = params[key]
> if value is not None:
- > ui.write(' %s\n' % value)
+ > ui.write(b' %s\n' % value)
> count = 0
> for p in unbundler.iterparts():
> count += 1
- > ui.write(' :%s:\n' % p.type)
- > ui.write(' mandatory: %i\n' % len(p.mandatoryparams))
- > ui.write(' advisory: %i\n' % len(p.advisoryparams))
- > ui.write(' payload: %i bytes\n' % len(p.read()))
- > ui.write('parts count: %i\n' % count)
+ > ui.write(b' :%s:\n' % p.type)
+ > ui.write(b' mandatory: %i\n' % len(p.mandatoryparams))
+ > ui.write(b' advisory: %i\n' % len(p.advisoryparams))
+ > ui.write(b' payload: %i bytes\n' % len(p.read()))
+ > ui.write(b'parts count: %i\n' % count)
> EOF
$ cat >> $HGRCPATH << EOF
> [extensions]
> bundle2=$TESTTMP/bundle2.py
> [experimental]
- > evolution=createmarkers
+ > stabilization=createmarkers
> [ui]
> ssh=$PYTHON "$TESTDIR/dummyssh"
> logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
@@ -408,8 +411,8 @@
$ hg statbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../out.hg2
bundle2-input: start processing of HG20 stream
bundle2-input: reading bundle2 stream parameters
- bundle2-input: ignoring unknown parameter 'e|! 7/'
- bundle2-input: ignoring unknown parameter 'simple'
+ bundle2-input: ignoring unknown parameter e|! 7/
+ bundle2-input: ignoring unknown parameter simple
options count: 2
- e|! 7/
babar%#==tutu
@@ -432,7 +435,7 @@
bad parameter name
$ hg bundle2 --param 42babar
- abort: non letter first character: '42babar'
+ abort: non letter first character: 42babar
[255]
@@ -649,7 +652,7 @@
bundle2-input: part type: "test:song"
bundle2-input: part id: "2"
bundle2-input: part parameters: 0
- bundle2-input: found a handler for part 'test:song'
+ bundle2-input: found a handler for part test:song
bundle2-input-part: "test:song" (advisory) supported
The choir starts singing:
bundle2-input: payload chunk size: 178
@@ -662,7 +665,7 @@
bundle2-input: part type: "test:debugreply"
bundle2-input: part id: "3"
bundle2-input: part parameters: 0
- bundle2-input: found a handler for part 'test:debugreply'
+ bundle2-input: found a handler for part test:debugreply
bundle2-input-part: "test:debugreply" (advisory) supported
debugreply: no reply
bundle2-input: payload chunk size: 0
@@ -679,15 +682,15 @@
bundle2-input: part type: "test:song"
bundle2-input: part id: "5"
bundle2-input: part parameters: 1
- bundle2-input: found a handler for part 'test:song'
+ bundle2-input: found a handler for part test:song
bundle2-input: ignoring unsupported advisory part test:song - randomparam
- bundle2-input-part: "test:song" (advisory) (params: 1 mandatory) unsupported-params (['randomparam'])
+ bundle2-input-part: "test:song" (advisory) (params: 1 mandatory) unsupported-params (randomparam)
bundle2-input: payload chunk size: 0
bundle2-input: part header size: 16
bundle2-input: part type: "test:ping"
bundle2-input: part id: "6"
bundle2-input: part parameters: 0
- bundle2-input: found a handler for part 'test:ping'
+ bundle2-input: found a handler for part test:ping
bundle2-input-part: "test:ping" (advisory) supported
received ping request (id 6)
bundle2-input: payload chunk size: 0
@@ -989,7 +992,7 @@
$ hg debugbundle ../rev.hg2
Stream params: {}
- changegroup -- 'sortdict()'
+ changegroup -- {}
32af7686d403cf45b5d95f2d70cebea587ac806a
9520eea781bcca16c1e15acc0ba14335a0e8e5ba
eea13746799a9e0bfd88f29d3c2e9dc9389f524f
@@ -1117,8 +1120,8 @@
0360: db fb 6a 33 df c1 7d 99 cf ef d4 d5 6d da 77 7c |..j3..}.....m.w||
0370: 3b 19 fd af c5 3f f1 60 c3 17 |;....?.`..|
$ hg debugbundle ../rev.hg2.bz
- Stream params: sortdict([('Compression', 'GZ')])
- changegroup -- 'sortdict()'
+ Stream params: {Compression: GZ}
+ changegroup -- {}
32af7686d403cf45b5d95f2d70cebea587ac806a
9520eea781bcca16c1e15acc0ba14335a0e8e5ba
eea13746799a9e0bfd88f29d3c2e9dc9389f524f
@@ -1204,8 +1207,8 @@
0420: 8b 43 88 57 9c 01 f5 61 b5 e1 27 41 7e af 83 fe |.C.W...a..'A~...|
0430: 2e e4 8a 70 a1 21 46 96 30 7a |...p.!F.0z|
$ hg debugbundle ../rev.hg2.bz
- Stream params: sortdict([('Compression', 'BZ')])
- changegroup -- 'sortdict()'
+ Stream params: {Compression: BZ}
+ changegroup -- {}
32af7686d403cf45b5d95f2d70cebea587ac806a
9520eea781bcca16c1e15acc0ba14335a0e8e5ba
eea13746799a9e0bfd88f29d3c2e9dc9389f524f
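
The bundle2 test-extension churn above is mechanical Python 3 preparation: string literals that reach Mercurial APIs become bytes literals, the Python-2-only "except X, exc" form becomes "except X as exc", and bare sys.stdin/sys.stdout give way to pycompat.stdin/pycompat.stdout. A minimal standalone sketch of the same idioms follows; it is not part of the patch, and the emit helper is invented for illustration.

  from __future__ import absolute_import
  import sys

  def emit(data):
      # pycompat.stdout wraps Mercurial's byte-oriented stream; outside
      # Mercurial the closest portable spelling is sys.stdout.buffer on
      # Python 3 and sys.stdout itself on Python 2.
      out = getattr(sys.stdout, 'buffer', sys.stdout)
      out.write(data)

  param = b'elephants=babar'            # bytes literal, not a unicode str
  key, value = param.split(b'=', 1)     # bytes methods take bytes arguments
  try:
      int(value)
  except ValueError as exc:             # 'as exc', not the removed comma form
      emit(b'parse error: ' + str(exc).encode('ascii') + b'\n')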
--- a/tests/test-bundle2-multiple-changegroups.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-bundle2-multiple-changegroups.t Sat Sep 30 07:52:48 2017 -0700
@@ -13,13 +13,13 @@
> # in 'heads' as intermediate heads for the first changegroup.
> intermediates = [repo[r].p1().node() for r in heads]
> outgoing = discovery.outgoing(repo, common, intermediates)
- > cg = changegroup.getchangegroup(repo, source, outgoing,
- > bundlecaps=bundlecaps)
+ > cg = changegroup.makechangegroup(repo, outgoing, '01',
+ > source, bundlecaps=bundlecaps)
> bundler.newpart('output', data='changegroup1')
> bundler.newpart('changegroup', data=cg.getchunks())
> outgoing = discovery.outgoing(repo, common + intermediates, heads)
- > cg = changegroup.getchangegroup(repo, source, outgoing,
- > bundlecaps=bundlecaps)
+ > cg = changegroup.makechangegroup(repo, outgoing, '01',
+ > source, bundlecaps=bundlecaps)
> bundler.newpart('output', data='changegroup2')
> bundler.newpart('changegroup', data=cg.getchunks())
>
--- a/tests/test-bundle2-pushback.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-bundle2-pushback.t Sat Sep 30 07:52:48 2017 -0700
@@ -3,7 +3,8 @@
> Current bundle2 implementation doesn't provide a way to generate those
> parts, so they must be created by extensions.
> """
- > from mercurial import bundle2, pushkey, exchange, util
+ > from __future__ import absolute_import
+ > from mercurial import bundle2, exchange, pushkey, util
> def _newhandlechangegroup(op, inpart):
> """This function wraps the changegroup part handler for getbundle.
> It issues an additional pushkey part to send a new
--- a/tests/test-bundle2-remote-changegroup.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-bundle2-remote-changegroup.t Sat Sep 30 07:52:48 2017 -0700
@@ -64,7 +64,8 @@
> common.extend(repo.lookup(r) for r in repo.revs(_common))
> heads = [repo.lookup(r) for r in repo.revs(heads)]
> outgoing = discovery.outgoing(repo, common, heads)
- > cg = changegroup.getchangegroup(repo, 'changegroup', outgoing)
+ > cg = changegroup.makechangegroup(repo, outgoing, '01',
+ > 'changegroup')
> newpart('changegroup', cg.getchunks())
> else:
> raise Exception('unknown verb')
--- a/tests/test-cache-abuse.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-cache-abuse.t Sat Sep 30 07:52:48 2017 -0700
@@ -2,7 +2,7 @@
$ cat >> $HGRCPATH << EOF
> [experimental]
- > evolution=createmarkers
+ > stabilization=createmarkers
> [phases]
> publish=False
> EOF
--- a/tests/test-casecollision-merge.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-casecollision-merge.t Sat Sep 30 07:52:48 2017 -0700
@@ -144,7 +144,7 @@
$ hg commit -m '#4'
$ hg merge
- abort: case-folding collision between a and A
+ abort: case-folding collision between [aA] and [Aa] (re)
[255]
$ hg parents --template '{rev}\n'
4
@@ -157,7 +157,7 @@
$ hg update --clean 2
1 files updated, 0 files merged, 2 files removed, 0 files unresolved
$ hg merge
- abort: case-folding collision between a and A
+ abort: case-folding collision between [aA] and [Aa] (re)
[255]
$ hg parents --template '{rev}\n'
2
@@ -327,7 +327,7 @@
$ hg status
A B
$ hg update
- abort: case-folding collision between b and B
+ abort: case-folding collision between [bB] and [Bb] (re)
[255]
$ hg update --check
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-check-interfaces.py Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,77 @@
+# Test that certain objects conform to well-defined interfaces.
+
+from __future__ import absolute_import, print_function
+
+from mercurial import (
+ bundlerepo,
+ httppeer,
+ localrepo,
+ sshpeer,
+ statichttprepo,
+ ui as uimod,
+ unionrepo,
+)
+
+def checkobject(o):
+ """Verify a constructed object conforms to interface rules.
+
+ An object must have __abstractmethods__ defined.
+
+ All "public" attributes of the object (attributes not prefixed with
+ an underscore) must be in __abstractmethods__ or appear on a base class
+ with __abstractmethods__.
+ """
+ name = o.__class__.__name__
+
+ allowed = set()
+ for cls in o.__class__.__mro__:
+ if not getattr(cls, '__abstractmethods__', set()):
+ continue
+
+ allowed |= cls.__abstractmethods__
+ allowed |= {a for a in dir(cls) if not a.startswith('_')}
+
+ if not allowed:
+ print('%s does not have abstract methods' % name)
+ return
+
+ public = {a for a in dir(o) if not a.startswith('_')}
+
+ for attr in sorted(public - allowed):
+ print('public attributes not in abstract interface: %s.%s' % (
+ name, attr))
+
+# Facilitates testing localpeer.
+class dummyrepo(object):
+ def __init__(self):
+ self.ui = uimod.ui()
+ def filtered(self, name):
+ pass
+ def _restrictcapabilities(self, caps):
+ pass
+
+# Facilitates testing sshpeer without requiring an SSH server.
+class testingsshpeer(sshpeer.sshpeer):
+ def _validaterepo(self, *args, **kwargs):
+ pass
+
+class badpeer(httppeer.httppeer):
+ def __init__(self):
+ super(badpeer, self).__init__(uimod.ui(), 'http://localhost')
+ self.badattribute = True
+
+ def badmethod(self):
+ pass
+
+def main():
+ ui = uimod.ui()
+
+ checkobject(badpeer())
+ checkobject(httppeer.httppeer(ui, 'http://localhost'))
+ checkobject(localrepo.localpeer(dummyrepo()))
+ checkobject(testingsshpeer(ui, 'ssh://localhost/foo'))
+ checkobject(bundlerepo.bundlepeer(dummyrepo()))
+ checkobject(statichttprepo.statichttppeer(dummyrepo()))
+ checkobject(unionrepo.unionpeer(dummyrepo()))
+
+main()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-check-interfaces.py.out Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,2 @@
+public attributes not in abstract interface: badpeer.badattribute
+public attributes not in abstract interface: badpeer.badmethod
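
The new test-check-interfaces.py above walks a class's MRO, collects __abstractmethods__ plus the public names declared on the abstract base classes, and complains about any extra public attribute on the concrete object (hence the two badpeer lines in the .out file). The same idea in a self-contained sketch; the fetcher/httpfetcher classes are invented here purely for illustration.

  from __future__ import absolute_import, print_function
  import abc

  # Python 2/3-compatible way to get a class whose metaclass is ABCMeta.
  fetcherbase = abc.ABCMeta('fetcherbase', (object,), {})

  class fetcher(fetcherbase):
      @abc.abstractmethod
      def fetch(self, url):
          pass

  class httpfetcher(fetcher):
      def fetch(self, url):
          return b''
      def debugdump(self):      # public name absent from the abstract interface
          pass

  def checkobject(o):
      allowed = set()
      for cls in type(o).__mro__:
          if not getattr(cls, '__abstractmethods__', set()):
              continue
          allowed |= cls.__abstractmethods__
          allowed |= {a for a in dir(cls) if not a.startswith('_')}
      public = {a for a in dir(o) if not a.startswith('_')}
      for attr in sorted(public - allowed):
          print('public attributes not in abstract interface: %s.%s'
                % (type(o).__name__, attr))

  checkobject(httpfetcher())    # prints: ... httpfetcher.debugdump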
--- a/tests/test-check-module-imports.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-check-module-imports.t Sat Sep 30 07:52:48 2017 -0700
@@ -16,24 +16,28 @@
$ testrepohg locate 'set:**.py or grep(r"^#!.*?python")' \
> 'tests/**.t' \
+ > -X hgweb.cgi \
+ > -X setup.py \
> -X contrib/debugshell.py \
+ > -X contrib/hgweb.fcgi \
> -X contrib/python-zstandard/ \
> -X contrib/win32/hgwebdir_wsgi.py \
> -X doc/gendoc.py \
> -X doc/hgmanpage.py \
> -X i18n/posplit \
+ > -X tests/hypothesishelpers.py \
+ > -X tests/test-commit-interactive.t \
+ > -X tests/test-contrib-check-code.t \
+ > -X tests/test-demandimport.py \
+ > -X tests/test-extension.t \
+ > -X tests/test-hghave.t \
> -X tests/test-hgweb-auth.py \
- > -X tests/hypothesishelpers.py \
- > -X tests/test-lock.py \
- > -X tests/test-verify-repo-operations.py \
+ > -X tests/test-hgweb-no-path-info.t \
+ > -X tests/test-hgweb-no-request-uri.t \
+ > -X tests/test-hgweb-non-interactive.t \
> -X tests/test-hook.t \
> -X tests/test-import.t \
> -X tests/test-imports-checker.t \
- > -X tests/test-commit-interactive.t \
- > -X tests/test-contrib-check-code.t \
- > -X tests/test-extension.t \
- > -X tests/test-hghave.t \
- > -X tests/test-hgweb-no-path-info.t \
- > -X tests/test-hgweb-no-request-uri.t \
- > -X tests/test-hgweb-non-interactive.t \
+ > -X tests/test-lock.py \
+ > -X tests/test-verify-repo-operations.py \
> | sed 's-\\-/-g' | $PYTHON "$import_checker" -
--- a/tests/test-check-py3-compat.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-check-py3-compat.t Sat Sep 30 07:52:48 2017 -0700
@@ -19,9 +19,7 @@
contrib/python-zstandard/tests/test_estimate_sizes.py not using absolute_import
contrib/python-zstandard/tests/test_module_attributes.py not using absolute_import
contrib/python-zstandard/tests/test_train_dictionary.py not using absolute_import
- i18n/check-translation.py not using absolute_import
setup.py not using absolute_import
- tests/test-demandimport.py not using absolute_import
#if py3exe
$ testrepohg files 'set:(**.py) - grep(pygments)' \
--- a/tests/test-chg.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-chg.t Sat Sep 30 07:52:48 2017 -0700
@@ -48,7 +48,7 @@
$ touch foo
$ CHGDEBUG= HGEDITOR=cat chg ci -Am channeled --edit 2>&1 \
> | egrep "HG:|run 'cat"
- chg: debug: run 'cat "*"' at '$TESTTMP/editor' (glob)
+ chg: debug: * run 'cat "*"' at '$TESTTMP/editor' (glob)
HG: Enter commit message. Lines beginning with 'HG:' are removed.
HG: Leave message empty to abort commit.
HG: --
@@ -115,7 +115,9 @@
> EOF
$ cat > $TESTTMP/fakepager.py <<EOF
- > import sys, time
+ > from __future__ import absolute_import
+ > import sys
+ > import time
> for line in iter(sys.stdin.readline, ''):
> if 'crash' in line: # only interested in lines containing 'crash'
> # if chg exits when pager is sleeping (incorrectly), the output
@@ -163,16 +165,16 @@
warm up server:
$ CHGDEBUG= chg log 2>&1 | egrep 'instruction|start'
- chg: debug: start cmdserver at $TESTTMP/extreload/chgsock/server.* (glob)
+ chg: debug: * start cmdserver at $TESTTMP/extreload/chgsock/server.* (glob)
new server should be started if extension modified:
$ sleep 1
$ touch dummyext.py
$ CHGDEBUG= chg log 2>&1 | egrep 'instruction|start'
- chg: debug: instruction: unlink $TESTTMP/extreload/chgsock/server-* (glob)
- chg: debug: instruction: reconnect
- chg: debug: start cmdserver at $TESTTMP/extreload/chgsock/server.* (glob)
+ chg: debug: * instruction: unlink $TESTTMP/extreload/chgsock/server-* (glob)
+ chg: debug: * instruction: reconnect (glob)
+ chg: debug: * start cmdserver at $TESTTMP/extreload/chgsock/server.* (glob)
old server will shut down, while new server should still be reachable:
@@ -194,7 +196,7 @@
(this test makes sure that old server shut down automatically)
$ CHGDEBUG= chg log 2>&1 | egrep 'instruction|start'
- chg: debug: start cmdserver at $TESTTMP/extreload/chgsock/server.* (glob)
+ chg: debug: * start cmdserver at $TESTTMP/extreload/chgsock/server.* (glob)
shut down servers and restore environment:
--- a/tests/test-clone-uncompressed.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-clone-uncompressed.t Sat Sep 30 07:52:48 2017 -0700
@@ -44,8 +44,8 @@
sending getbundle command
bundle2-input-bundle: with-transaction
bundle2-input-part: "listkeys" (params: 1 mandatory) supported
- bundle2-input-part: total payload size 58
- bundle2-input-part: "listkeys" (params: 1 mandatory) supported
+ bundle2-input-part: "phase-heads" supported
+ bundle2-input-part: total payload size 24
bundle2-input-bundle: 1 parts total
checking for updated bookmarks
--- a/tests/test-clone.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-clone.t Sat Sep 30 07:52:48 2017 -0700
@@ -706,7 +706,7 @@
$ cd filteredrev0
$ cat >> .hg/hgrc << EOF
> [experimental]
- > evolution=createmarkers
+ > stabilization=createmarkers
> EOF
$ echo initial1 > foo
$ hg -q commit -A -m initial0
--- a/tests/test-clonebundles.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-clonebundles.t Sat Sep 30 07:52:48 2017 -0700
@@ -32,7 +32,7 @@
$ cat server/access.log
* - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
* - - [*] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=aaff8d2ffbbf07a46dd1f05d8ae7877e3f56e2a2&listkeys=phases%2Cbookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ * - - [*] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=aaff8d2ffbbf07a46dd1f05d8ae7877e3f56e2a2&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*,zlib,none,bzip2 (glob)
Empty manifest file results in retrieval
(the extension only checks if the manifest file exists)
@@ -431,3 +431,81 @@
finished applying clone bundle
searching for changes
no changes found
+
+Test interaction between clone bundles and --uncompressed
+
+A manifest with just a gzip bundle
+
+ $ cat > server/.hg/clonebundles.manifest << EOF
+ > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
+ > EOF
+
+ $ hg clone -U --uncompressed http://localhost:$HGPORT uncompressed-gzip
+ no compatible clone bundles available on server; falling back to regular clone
+ (you may want to report this to the server operator)
+ streaming all changes
+ 4 files to transfer, 613 bytes of data
+ transferred 613 bytes in * seconds (*) (glob)
+ searching for changes
+ no changes found
+
+A manifest with a stream clone but no BUNDLESPEC
+
+ $ cat > server/.hg/clonebundles.manifest << EOF
+ > http://localhost:$HGPORT1/packed.hg
+ > EOF
+
+ $ hg clone -U --uncompressed http://localhost:$HGPORT uncompressed-no-bundlespec
+ no compatible clone bundles available on server; falling back to regular clone
+ (you may want to report this to the server operator)
+ streaming all changes
+ 4 files to transfer, 613 bytes of data
+ transferred 613 bytes in * seconds (*) (glob)
+ searching for changes
+ no changes found
+
+A manifest with a gzip bundle and a stream clone
+
+ $ cat > server/.hg/clonebundles.manifest << EOF
+ > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
+ > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1
+ > EOF
+
+ $ hg clone -U --uncompressed http://localhost:$HGPORT uncompressed-gzip-packed
+ applying clone bundle from http://localhost:$HGPORT1/packed.hg
+ 4 files to transfer, 613 bytes of data
+ transferred 613 bytes in * seconds (*) (glob)
+ finished applying clone bundle
+ searching for changes
+ no changes found
+
+A manifest with a gzip bundle and stream clone with supported requirements
+
+ $ cat > server/.hg/clonebundles.manifest << EOF
+ > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
+ > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv1
+ > EOF
+
+ $ hg clone -U --uncompressed http://localhost:$HGPORT uncompressed-gzip-packed-requirements
+ applying clone bundle from http://localhost:$HGPORT1/packed.hg
+ 4 files to transfer, 613 bytes of data
+ transferred 613 bytes in * seconds (*) (glob)
+ finished applying clone bundle
+ searching for changes
+ no changes found
+
+A manifest with a gzip bundle and a stream clone with unsupported requirements
+
+ $ cat > server/.hg/clonebundles.manifest << EOF
+ > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
+ > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv42
+ > EOF
+
+ $ hg clone -U --uncompressed http://localhost:$HGPORT uncompressed-gzip-packed-unsupported-requirements
+ no compatible clone bundles available on server; falling back to regular clone
+ (you may want to report this to the server operator)
+ streaming all changes
+ 4 files to transfer, 613 bytes of data
+ transferred 613 bytes in * seconds (*) (glob)
+ searching for changes
+ no changes found
--- a/tests/test-command-template.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-command-template.t Sat Sep 30 07:52:48 2017 -0700
@@ -41,62 +41,62 @@
$ hg debugtemplate -r0 -v '{5 / 2} {mod(5, 2)}\n'
(template
(/
- ('integer', '5')
- ('integer', '2'))
- ('string', ' ')
+ (integer '5')
+ (integer '2'))
+ (string ' ')
(func
- ('symbol', 'mod')
+ (symbol 'mod')
(list
- ('integer', '5')
- ('integer', '2')))
- ('string', '\n'))
+ (integer '5')
+ (integer '2')))
+ (string '\n'))
2 1
$ hg debugtemplate -r0 -v '{5 / -2} {mod(5, -2)}\n'
(template
(/
- ('integer', '5')
+ (integer '5')
(negate
- ('integer', '2')))
- ('string', ' ')
+ (integer '2')))
+ (string ' ')
(func
- ('symbol', 'mod')
+ (symbol 'mod')
(list
- ('integer', '5')
+ (integer '5')
(negate
- ('integer', '2'))))
- ('string', '\n'))
+ (integer '2'))))
+ (string '\n'))
-3 -1
$ hg debugtemplate -r0 -v '{-5 / 2} {mod(-5, 2)}\n'
(template
(/
(negate
- ('integer', '5'))
- ('integer', '2'))
- ('string', ' ')
+ (integer '5'))
+ (integer '2'))
+ (string ' ')
(func
- ('symbol', 'mod')
+ (symbol 'mod')
(list
(negate
- ('integer', '5'))
- ('integer', '2')))
- ('string', '\n'))
+ (integer '5'))
+ (integer '2')))
+ (string '\n'))
-3 1
$ hg debugtemplate -r0 -v '{-5 / -2} {mod(-5, -2)}\n'
(template
(/
(negate
- ('integer', '5'))
+ (integer '5'))
(negate
- ('integer', '2')))
- ('string', ' ')
+ (integer '2')))
+ (string ' ')
(func
- ('symbol', 'mod')
+ (symbol 'mod')
(list
(negate
- ('integer', '5'))
+ (integer '5'))
(negate
- ('integer', '2'))))
- ('string', '\n'))
+ (integer '2'))))
+ (string '\n'))
2 -1
Filters bind closer than arithmetic:
@@ -106,11 +106,11 @@
(-
(|
(func
- ('symbol', 'revset')
- ('string', '.'))
- ('symbol', 'count'))
- ('integer', '1'))
- ('string', '\n'))
+ (symbol 'revset')
+ (string '.'))
+ (symbol 'count'))
+ (integer '1'))
+ (string '\n'))
0
But negate binds closer still:
@@ -118,31 +118,45 @@
$ hg debugtemplate -r0 -v '{1-3|stringify}\n'
(template
(-
- ('integer', '1')
+ (integer '1')
(|
- ('integer', '3')
- ('symbol', 'stringify')))
- ('string', '\n'))
+ (integer '3')
+ (symbol 'stringify')))
+ (string '\n'))
hg: parse error: arithmetic only defined on integers
[255]
$ hg debugtemplate -r0 -v '{-3|stringify}\n'
(template
(|
(negate
- ('integer', '3'))
- ('symbol', 'stringify'))
- ('string', '\n'))
+ (integer '3'))
+ (symbol 'stringify'))
+ (string '\n'))
-3
+Filters bind as close as map operator:
+
+ $ hg debugtemplate -r0 -v '{desc|splitlines % "{line}\n"}'
+ (template
+ (%
+ (|
+ (symbol 'desc')
+ (symbol 'splitlines'))
+ (template
+ (symbol 'line')
+ (string '\n'))))
+ line 1
+ line 2
+
Keyword arguments:
$ hg debugtemplate -r0 -v '{foo=bar|baz}'
(template
(keyvalue
- ('symbol', 'foo')
+ (symbol 'foo')
(|
- ('symbol', 'bar')
- ('symbol', 'baz'))))
+ (symbol 'bar')
+ (symbol 'baz'))))
hg: parse error: can't use a key-value pair in this context
[255]
@@ -2166,9 +2180,10 @@
$ cd unstable-hash
$ hg log --template '{date|age}\n' > /dev/null || exit 1
- >>> from datetime import datetime, timedelta
+ >>> from __future__ import absolute_import
+ >>> import datetime
>>> fp = open('a', 'w')
- >>> n = datetime.now() + timedelta(366 * 7)
+ >>> n = datetime.datetime.now() + datetime.timedelta(366 * 7)
>>> fp.write('%d-%d-%d 00:00' % (n.year, n.month, n.day))
>>> fp.close()
$ hg add a
@@ -3104,6 +3119,20 @@
hg: parse error: None is not iterable
[255]
+Test new-style inline templating of non-list/dict type:
+
+ $ hg log -R latesttag -r tip -T '{manifest}\n'
+ 11:2bc6e9006ce2
+ $ hg log -R latesttag -r tip -T 'string length: {manifest|count}\n'
+ string length: 15
+ $ hg log -R latesttag -r tip -T '{manifest % "{rev}:{node}"}\n'
+ 11:2bc6e9006ce29882383a22d39fd1f4e66dd3e2fc
+
+Test manifest can be join()-ed as before, though it's silly:
+
+ $ hg log -R latesttag -r tip -T '{join(manifest, "")}\n'
+ 11:2bc6e9006ce2
+
Test the sub function of templating for expansion:
$ hg log -R latesttag -r 10 --template '{sub("[0-9]", "x", "{rev}")}\n'
@@ -3173,21 +3202,21 @@
$ hg debugtemplate -v '{(0)}\n'
(template
(group
- ('integer', '0'))
- ('string', '\n'))
+ (integer '0'))
+ (string '\n'))
0
$ hg debugtemplate -v '{(123)}\n'
(template
(group
- ('integer', '123'))
- ('string', '\n'))
+ (integer '123'))
+ (string '\n'))
123
$ hg debugtemplate -v '{(-4)}\n'
(template
(group
(negate
- ('integer', '4')))
- ('string', '\n'))
+ (integer '4')))
+ (string '\n'))
-4
$ hg debugtemplate '{(-)}\n'
hg: parse error at 3: not a prefix: )
@@ -3200,25 +3229,25 @@
$ hg debugtemplate -D 1=one -v '{1}\n'
(template
- ('integer', '1')
- ('string', '\n'))
+ (integer '1')
+ (string '\n'))
one
$ hg debugtemplate -D 1=one -v '{if("t", "{1}")}\n'
(template
(func
- ('symbol', 'if')
+ (symbol 'if')
(list
- ('string', 't')
+ (string 't')
(template
- ('integer', '1'))))
- ('string', '\n'))
+ (integer '1'))))
+ (string '\n'))
one
$ hg debugtemplate -D 1=one -v '{1|stringify}\n'
(template
(|
- ('integer', '1')
- ('symbol', 'stringify'))
- ('string', '\n'))
+ (integer '1')
+ (symbol 'stringify'))
+ (string '\n'))
one
unless explicit symbol is expected:
@@ -3234,27 +3263,27 @@
$ hg debugtemplate -Ra -r0 -v '{"string with no template fragment"}\n'
(template
- ('string', 'string with no template fragment')
- ('string', '\n'))
+ (string 'string with no template fragment')
+ (string '\n'))
string with no template fragment
$ hg debugtemplate -Ra -r0 -v '{"template: {rev}"}\n'
(template
(template
- ('string', 'template: ')
- ('symbol', 'rev'))
- ('string', '\n'))
+ (string 'template: ')
+ (symbol 'rev'))
+ (string '\n'))
template: 0
$ hg debugtemplate -Ra -r0 -v '{r"rawstring: {rev}"}\n'
(template
- ('string', 'rawstring: {rev}')
- ('string', '\n'))
+ (string 'rawstring: {rev}')
+ (string '\n'))
rawstring: {rev}
$ hg debugtemplate -Ra -r0 -v '{files % r"rawstring: {file}"}\n'
(template
(%
- ('symbol', 'files')
- ('string', 'rawstring: {file}'))
- ('string', '\n'))
+ (symbol 'files')
+ (string 'rawstring: {file}'))
+ (string '\n'))
rawstring: {file}
Test string escaping:
@@ -3664,7 +3693,7 @@
$ cd hashcollision
$ cat <<EOF >> .hg/hgrc
> [experimental]
- > evolution = createmarkers
+ > stabilization = createmarkers
> EOF
$ echo 0 > a
$ hg ci -qAm 0
@@ -4241,49 +4270,49 @@
$ hg debugtemplate -vr0 '{rn} {utcdate(date)|isodate}\n'
(template
- ('symbol', 'rn')
- ('string', ' ')
+ (symbol 'rn')
+ (string ' ')
(|
(func
- ('symbol', 'utcdate')
- ('symbol', 'date'))
- ('symbol', 'isodate'))
- ('string', '\n'))
+ (symbol 'utcdate')
+ (symbol 'date'))
+ (symbol 'isodate'))
+ (string '\n'))
* expanded:
(template
(template
- ('symbol', 'rev')
- ('string', ':')
+ (symbol 'rev')
+ (string ':')
(|
- ('symbol', 'node')
- ('symbol', 'short')))
- ('string', ' ')
+ (symbol 'node')
+ (symbol 'short')))
+ (string ' ')
(|
(func
- ('symbol', 'localdate')
+ (symbol 'localdate')
(list
- ('symbol', 'date')
- ('string', 'UTC')))
- ('symbol', 'isodate'))
- ('string', '\n'))
+ (symbol 'date')
+ (string 'UTC')))
+ (symbol 'isodate'))
+ (string '\n'))
0:1e4e1b8f71e0 1970-01-12 13:46 +0000
$ hg debugtemplate -vr0 '{status("A", file_adds)}'
(template
(func
- ('symbol', 'status')
+ (symbol 'status')
(list
- ('string', 'A')
- ('symbol', 'file_adds'))))
+ (string 'A')
+ (symbol 'file_adds'))))
* expanded:
(template
(%
- ('symbol', 'file_adds')
+ (symbol 'file_adds')
(template
- ('string', 'A')
- ('string', ' ')
- ('symbol', 'file')
- ('string', '\n'))))
+ (string 'A')
+ (string ' ')
+ (symbol 'file')
+ (string '\n'))))
A a
A unary function alias can be called as a filter:
@@ -4292,20 +4321,20 @@
(template
(|
(|
- ('symbol', 'date')
- ('symbol', 'utcdate'))
- ('symbol', 'isodate'))
- ('string', '\n'))
+ (symbol 'date')
+ (symbol 'utcdate'))
+ (symbol 'isodate'))
+ (string '\n'))
* expanded:
(template
(|
(func
- ('symbol', 'localdate')
+ (symbol 'localdate')
(list
- ('symbol', 'date')
- ('string', 'UTC')))
- ('symbol', 'isodate'))
- ('string', '\n'))
+ (symbol 'date')
+ (string 'UTC')))
+ (symbol 'isodate'))
+ (string '\n'))
1970-01-12 13:46 +0000
Aliases should be applied only to command arguments and templates in hgrc.
@@ -4340,7 +4369,7 @@
$ hg debugtemplate --config templatealias.bad='x(' -v '{bad}'
(template
- ('symbol', 'bad'))
+ (symbol 'bad'))
abort: bad definition of template alias "bad": at 2: not a prefix: end
[255]
$ hg log --config templatealias.bad='x(' -T '{bad}'
@@ -4416,3 +4445,155 @@
custom
$ cd ..
+
+Test 'graphwidth' in 'hg log' on various topologies. The key here is that the
+printed graphwidths 3, 5, 7, etc. should all line up in their respective
+columns. We don't care about other aspects of the graph rendering here.
+
+ $ hg init graphwidth
+ $ cd graphwidth
+
+ $ wrappabletext="a a a a a a a a a a a a"
+
+ $ printf "first\n" > file
+ $ hg add file
+ $ hg commit -m "$wrappabletext"
+
+ $ printf "first\nsecond\n" > file
+ $ hg commit -m "$wrappabletext"
+
+ $ hg checkout 0
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ printf "third\nfirst\n" > file
+ $ hg commit -m "$wrappabletext"
+ created new head
+
+ $ hg merge
+ merging file
+ 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+
+ $ hg log --graph -T "{graphwidth}"
+ @ 3
+ |
+ | @ 5
+ |/
+ o 3
+
+ $ hg commit -m "$wrappabletext"
+
+ $ hg log --graph -T "{graphwidth}"
+ @ 5
+ |\
+ | o 5
+ | |
+ o | 5
+ |/
+ o 3
+
+
+ $ hg checkout 0
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ printf "third\nfirst\nsecond\n" > file
+ $ hg commit -m "$wrappabletext"
+ created new head
+
+ $ hg log --graph -T "{graphwidth}"
+ @ 3
+ |
+ | o 7
+ | |\
+ +---o 7
+ | |
+ | o 5
+ |/
+ o 3
+
+
+ $ hg log --graph -T "{graphwidth}" -r 3
+ o 5
+ |\
+ ~ ~
+
+ $ hg log --graph -T "{graphwidth}" -r 1
+ o 3
+ |
+ ~
+
+ $ hg merge
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg commit -m "$wrappabletext"
+
+ $ printf "seventh\n" >> file
+ $ hg commit -m "$wrappabletext"
+
+ $ hg log --graph -T "{graphwidth}"
+ @ 3
+ |
+ o 5
+ |\
+ | o 5
+ | |
+ o | 7
+ |\ \
+ | o | 7
+ | |/
+ o / 5
+ |/
+ o 3
+
+
+The point of graphwidth is to allow wrapping that accounts for the space taken
+by the graph.
+
+ $ COLUMNS=10 hg log --graph -T "{fill(desc, termwidth - graphwidth)}"
+ @ a a a a
+ | a a a a
+ | a a a a
+ o a a a
+ |\ a a a
+ | | a a a
+ | | a a a
+ | o a a a
+ | | a a a
+ | | a a a
+ | | a a a
+ o | a a
+ |\ \ a a
+ | | | a a
+ | | | a a
+ | | | a a
+ | | | a a
+ | o | a a
+ | |/ a a
+ | | a a
+ | | a a
+ | | a a
+ | | a a
+ o | a a a
+ |/ a a a
+ | a a a
+ | a a a
+ o a a a a
+ a a a a
+ a a a a
+
+Something tricky happens when there are elided nodes; the next drawn row of
+edges can be more than one column wider, but the graph width only increases by
+one column. The remaining columns are added in between the nodes.
+
+ $ hg log --graph -T "{graphwidth}" -r "0|2|4|5"
+ o 5
+ |\
+ | \
+ | :\
+ o : : 7
+ :/ /
+ : o 5
+ :/
+ o 3
+
+
+ $ cd ..
+
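
The fill(desc, termwidth - graphwidth) template exercised above wraps each description into whatever width remains once the graph columns are accounted for. The arithmetic can be sanity-checked with plain textwrap; this is a sketch, not Mercurial code, and the 10-column terminal mirrors the COLUMNS=10 invocation in the test.

  from __future__ import absolute_import, print_function
  import textwrap

  desc = 'a a a a a a a a a a a a'   # the $wrappabletext used by the test
  termwidth = 10                     # COLUMNS=10 in the test run
  for graphwidth in (3, 5, 7):       # widths reported by {graphwidth} above
      print('graph uses %d columns, text wraps at %d:'
            % (graphwidth, termwidth - graphwidth))
      print(textwrap.fill(desc, width=termwidth - graphwidth))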
--- a/tests/test-commandserver.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-commandserver.t Sat Sep 30 07:52:48 2017 -0700
@@ -13,8 +13,10 @@
$ hg init repo
$ cd repo
- >>> from __future__ import print_function
- >>> from hgclient import readchannel, runcommand, check
+ >>> from __future__ import absolute_import, print_function
+ >>> import os
+ >>> import sys
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def hellomessage(server):
... ch, data = readchannel(server)
@@ -32,7 +34,7 @@
... server.stdin.write('unknowncommand\n')
abort: unknown command unknowncommand
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def checkruncommand(server):
... # hello block
@@ -91,7 +93,7 @@
abort: unknown revision 'unknown'!
[255]
- >>> from hgclient import readchannel, check
+ >>> from hgclient import check, readchannel
>>> @check
... def inputeof(server):
... readchannel(server)
@@ -103,7 +105,7 @@
... print('server exit code =', server.wait())
server exit code = 1
- >>> from hgclient import readchannel, runcommand, check, stringio
+ >>> from hgclient import check, readchannel, runcommand, stringio
>>> @check
... def serverinput(server):
... readchannel(server)
@@ -138,7 +140,7 @@
check that "histedit --commands=-" can read rules from the input channel:
>>> import cStringIO
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def serverinput(server):
... readchannel(server)
@@ -152,7 +154,7 @@
$ mkdir foo
$ touch foo/bar
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def cwd(server):
... readchannel(server)
@@ -173,7 +175,7 @@
> foo = bar
> EOF
- >>> from hgclient import readchannel, sep, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand, sep
>>> @check
... def localhgrc(server):
... readchannel(server)
@@ -223,7 +225,7 @@
> print('now try to read something: %r' % sys.stdin.read())
> EOF
- >>> from hgclient import readchannel, runcommand, check, stringio
+ >>> from hgclient import check, readchannel, runcommand, stringio
>>> @check
... def hookoutput(server):
... readchannel(server)
@@ -240,7 +242,7 @@
$ echo a >> a
>>> import os
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def outsidechanges(server):
... readchannel(server)
@@ -260,7 +262,7 @@
*** runcommand status
>>> import os
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def bookmarks(server):
... readchannel(server)
@@ -281,6 +283,7 @@
... f.close()
... runcommand(server, ['commit', '-Amm'])
... runcommand(server, ['bookmarks'])
+ ... print('')
*** runcommand bookmarks
no bookmarks set
*** runcommand bookmarks
@@ -295,9 +298,10 @@
bm1 1:d3a0a68be6de
bm2 1:d3a0a68be6de
* bm3 2:aef17e88f5f0
+
>>> import os
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def tagscache(server):
... readchannel(server)
@@ -310,7 +314,7 @@
foo
>>> import os
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def setphase(server):
... readchannel(server)
@@ -323,7 +327,7 @@
3: public
$ echo a >> a
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def rollback(server):
... readchannel(server)
@@ -331,6 +335,7 @@
... runcommand(server, ['commit', '-Am.'])
... runcommand(server, ['rollback'])
... runcommand(server, ['phase', '-r', '.'])
+ ... print('')
*** runcommand phase -r . -p
no phases changed
*** runcommand commit -Am.
@@ -339,9 +344,10 @@
working directory now based on revision 3
*** runcommand phase -r .
3: public
+
>>> import os
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def branch(server):
... readchannel(server)
@@ -360,7 +366,7 @@
$ touch .hgignore
>>> import os
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def hgignore(server):
... readchannel(server)
@@ -372,16 +378,18 @@
... f.write('ignored-file')
... f.close()
... runcommand(server, ['status', '-i', '-u'])
+ ... print('')
*** runcommand commit -Am.
adding .hgignore
*** runcommand status -i -u
I ignored-file
+
cache of non-public revisions should be invalidated on repository change
(issue4855):
>>> import os
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def phasesetscacheaftercommit(server):
... readchannel(server)
@@ -396,15 +404,17 @@
... os.system('hg commit -Aqm%d' % i)
... # new commits should be listed as draft revisions
... runcommand(server, ['log', '-qr', 'draft()'])
+ ... print('')
*** runcommand log -qr draft()
4:7966c8e3734d
*** runcommand log -qr draft()
4:7966c8e3734d
5:41f6602d1c4f
6:10501e202c35
+
>>> import os
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def phasesetscacheafterstrip(server):
... readchannel(server)
@@ -414,17 +424,19 @@
... os.system('hg --config extensions.strip= strip -q 5')
... # shouldn't abort by "unknown revision '6'"
... runcommand(server, ['log', '-qr', 'draft()'])
+ ... print('')
*** runcommand log -qr draft()
4:7966c8e3734d
5:41f6602d1c4f
6:10501e202c35
*** runcommand log -qr draft()
4:7966c8e3734d
+
cache of phase roots should be invalidated on strip (issue3827):
>>> import os
- >>> from hgclient import readchannel, sep, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand, sep
>>> @check
... def phasecacheafterstrip(server):
... readchannel(server)
@@ -475,7 +487,7 @@
changelog and manifest would have invalid node:
$ echo a >> a
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def txabort(server):
... readchannel(server)
@@ -497,11 +509,11 @@
$ cat >> .hg/hgrc << EOF
> [experimental]
- > evolution=createmarkers
+ > stabilization=createmarkers
> EOF
>>> import os
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def obsolete(server):
... readchannel(server)
@@ -550,7 +562,7 @@
> EOF
>>> import os
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def mqoutsidechanges(server):
... readchannel(server)
@@ -575,7 +587,8 @@
foo
$ cat <<EOF > dbgui.py
- > import os, sys
+ > import os
+ > import sys
> from mercurial import commands, registrar
> cmdtable = {}
> command = registrar.command(cmdtable)
@@ -599,7 +612,7 @@
> dbgui = dbgui.py
> EOF
- >>> from hgclient import readchannel, runcommand, check, stringio
+ >>> from hgclient import check, readchannel, runcommand, stringio
>>> @check
... def getpass(server):
... readchannel(server)
@@ -634,7 +647,7 @@
run commandserver in commandserver, which is silly but should work:
>>> from __future__ import print_function
- >>> from hgclient import readchannel, runcommand, check, stringio
+ >>> from hgclient import check, readchannel, runcommand, stringio
>>> @check
... def nested(server):
... print('%c, %r' % readchannel(server))
@@ -657,7 +670,7 @@
$ cd ..
>>> from __future__ import print_function
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def hellomessage(server):
... ch, data = readchannel(server)
@@ -670,7 +683,7 @@
abort: there is no Mercurial repository here (.hg not found)
[255]
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def startwithoutrepo(server):
... readchannel(server)
@@ -698,7 +711,7 @@
#if unix-socket unix-permissions
>>> from __future__ import print_function
- >>> from hgclient import unixserver, readchannel, runcommand, check, stringio
+ >>> from hgclient import check, readchannel, runcommand, stringio, unixserver
>>> server = unixserver('.hg/server.sock', '.hg/server.log')
>>> def hellomessage(conn):
... ch, data = readchannel(conn)
@@ -750,7 +763,7 @@
> log = inexistent/path.log
> EOF
>>> from __future__ import print_function
- >>> from hgclient import unixserver, readchannel, check
+ >>> from hgclient import check, readchannel, unixserver
>>> server = unixserver('.hg/server.sock', '.hg/server.log')
>>> def earlycrash(conn):
... while True:
@@ -827,7 +840,7 @@
(failure before finalization)
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def abort(server):
... readchannel(server)
@@ -846,7 +859,7 @@
(failure after finalization)
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def abort(server):
... readchannel(server)
@@ -871,7 +884,7 @@
(failure before finalization)
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def abort(server):
... readchannel(server)
@@ -891,7 +904,7 @@
(failure after finalization)
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def abort(server):
... readchannel(server)
@@ -941,7 +954,7 @@
and the merge should fail (issue5628)
$ hg up -q null
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def merge(server):
... readchannel(server)
@@ -962,7 +975,7 @@
$ hg up -qC 0
$ touch ../merge-symlink-out/poisoned
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def files(server):
... readchannel(server)
--- a/tests/test-commit-amend.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-commit-amend.t Sat Sep 30 07:52:48 2017 -0700
@@ -40,7 +40,7 @@
$ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend -m 'amend base1'
pretxncommit 43f1ba15f28a50abf0aae529cf8a16bfced7b149
43f1ba15f28a tip
- saved backup bundle to $TESTTMP/.hg/strip-backup/489edb5b847d-f1bf3ab8-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/489edb5b847d-5ab4f721-amend.hg (glob)
$ echo 'pretxncommit.foo = ' >> $HGRCPATH
$ hg diff -c .
diff -r ad120869acf0 -r 43f1ba15f28a a
@@ -69,31 +69,36 @@
> #!/bin/sh
> echo "" > "$1"
> __EOF__
+
+Update the existing file to ensure that the dirstate is not in pending state
+(where the status of some files in the working copy is not known yet). This in
+turn ensures that when the transaction is aborted due to an empty message during
+the amend, there should be no rollback.
+ $ echo a >> a
+
$ echo b > b
$ hg add b
$ hg summary
parent: 1:43f1ba15f28a tip
amend base1
branch: default
- commit: 1 added, 1 unknown
+ commit: 1 modified, 1 added, 1 unknown
update: (current)
phases: 2 draft
$ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend
- transaction abort!
- rollback completed
abort: empty commit message
[255]
$ hg summary
parent: 1:43f1ba15f28a tip
amend base1
branch: default
- commit: 1 added, 1 unknown
+ commit: 1 modified, 1 added, 1 unknown
update: (current)
phases: 2 draft
-Add new file:
+Add new file along with modified existing file:
$ hg ci --amend -m 'amend base1 new file'
- saved backup bundle to $TESTTMP/.hg/strip-backup/43f1ba15f28a-7a3b3496-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/43f1ba15f28a-007467c2-amend.hg (glob)
Remove file that was added in amended commit:
(and test logfile option)
@@ -102,17 +107,17 @@
$ hg rm b
$ echo 'amend base1 remove new file' > ../logfile
$ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg ci --amend --logfile ../logfile
- saved backup bundle to $TESTTMP/.hg/strip-backup/b8e3cb2b3882-0b55739a-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/c16295aaf401-1ada9901-amend.hg (glob)
$ hg cat b
- b: no such file in rev 74609c7f506e
+ b: no such file in rev 47343646fa3d
[1]
No changes, just a different message:
$ hg ci -v --amend -m 'no changes, new message'
- amending changeset 74609c7f506e
- copying changeset 74609c7f506e to ad120869acf0
+ amending changeset 47343646fa3d
+ copying changeset 47343646fa3d to ad120869acf0
committing files:
a
committing manifest
@@ -121,29 +126,30 @@
uncompressed size of bundle content:
254 (changelog)
163 (manifests)
- 129 a
- saved backup bundle to $TESTTMP/.hg/strip-backup/74609c7f506e-1bfde511-amend.hg (glob)
+ 131 a
+ saved backup bundle to $TESTTMP/.hg/strip-backup/47343646fa3d-c2758885-amend.hg (glob)
1 changesets found
uncompressed size of bundle content:
250 (changelog)
163 (manifests)
- 129 a
+ 131 a
adding branch
adding changesets
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
- committed changeset 1:1cd866679df8
+ committed changeset 1:401431e913a1
$ hg diff -c .
- diff -r ad120869acf0 -r 1cd866679df8 a
+ diff -r ad120869acf0 -r 401431e913a1 a
--- a/a Thu Jan 01 00:00:00 1970 +0000
+++ b/a Thu Jan 01 00:00:00 1970 +0000
- @@ -1,1 +1,3 @@
+ @@ -1,1 +1,4 @@
a
+a
+a
+ +a
$ hg log
- changeset: 1:1cd866679df8
+ changeset: 1:401431e913a1
tag: tip
user: test
date: Thu Jan 01 00:00:00 1970 +0000
@@ -168,12 +174,12 @@
> EOF
$ HGEDITOR="sh .hg/checkeditform.sh" hg ci --amend -u foo -d '1 0'
HGEDITFORM=commit.amend.normal
- saved backup bundle to $TESTTMP/.hg/strip-backup/1cd866679df8-5f5bcb85-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/401431e913a1-5e8e532c-amend.hg (glob)
$ echo a >> a
$ hg ci --amend -u foo -d '1 0'
- saved backup bundle to $TESTTMP/.hg/strip-backup/780e6f23e03d-83b10a27-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/d96b1d28ae33-677e0afb-amend.hg (glob)
$ hg log -r .
- changeset: 1:5f357c7560ab
+ changeset: 1:a9a13940fc03
tag: tip
user: foo
date: Thu Jan 01 00:00:01 1970 +0000
@@ -197,8 +203,8 @@
$ rm -f .hg/last-message.txt
$ hg commit --amend -v -m "message given from command line"
- amending changeset 5f357c7560ab
- copying changeset 5f357c7560ab to ad120869acf0
+ amending changeset a9a13940fc03
+ copying changeset a9a13940fc03 to ad120869acf0
committing files:
a
committing manifest
@@ -213,8 +219,8 @@
$ rm -f .hg/last-message.txt
$ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend -v
- amending changeset 5f357c7560ab
- copying changeset 5f357c7560ab to ad120869acf0
+ amending changeset a9a13940fc03
+ copying changeset a9a13940fc03 to ad120869acf0
no changes, new message
@@ -245,8 +251,8 @@
then, test editing custom commit message
$ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend -v
- amending changeset 5f357c7560ab
- copying changeset 5f357c7560ab to ad120869acf0
+ amending changeset a9a13940fc03
+ copying changeset a9a13940fc03 to ad120869acf0
no changes, new message
@@ -264,30 +270,25 @@
uncompressed size of bundle content:
249 (changelog)
163 (manifests)
- 131 a
- saved backup bundle to $TESTTMP/.hg/strip-backup/5f357c7560ab-e7c84ade-amend.hg (glob)
+ 133 a
+ saved backup bundle to $TESTTMP/.hg/strip-backup/a9a13940fc03-7c2e8674-amend.hg (glob)
1 changesets found
uncompressed size of bundle content:
257 (changelog)
163 (manifests)
- 131 a
+ 133 a
adding branch
adding changesets
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
- committed changeset 1:7ab3bf440b54
+ committed changeset 1:64a124ba1b44
Same, but with changes in working dir (different code path):
$ echo a >> a
$ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend -v
- amending changeset 7ab3bf440b54
- committing files:
- a
- committing manifest
- committing changelog
- copying changeset a0ea9b1a4c8c to ad120869acf0
+ amending changeset 64a124ba1b44
another precious commit message
@@ -301,27 +302,27 @@
a
committing manifest
committing changelog
- 2 changesets found
- uncompressed size of bundle content:
- 464 (changelog)
- 322 (manifests)
- 249 a
- saved backup bundle to $TESTTMP/.hg/strip-backup/7ab3bf440b54-8e3b5088-amend.hg (glob)
1 changesets found
uncompressed size of bundle content:
257 (changelog)
163 (manifests)
133 a
+ saved backup bundle to $TESTTMP/.hg/strip-backup/64a124ba1b44-10374b8f-amend.hg (glob)
+ 1 changesets found
+ uncompressed size of bundle content:
+ 257 (changelog)
+ 163 (manifests)
+ 135 a
adding branch
adding changesets
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
- committed changeset 1:ea22a388757c
+ committed changeset 1:7892795b8e38
$ rm editor.sh
$ hg log -r .
- changeset: 1:ea22a388757c
+ changeset: 1:7892795b8e38
tag: tip
user: foo
date: Thu Jan 01 00:00:01 1970 +0000
@@ -333,16 +334,16 @@
$ hg book book1
$ hg book book2
$ hg ci --amend -m 'move bookmarks'
- saved backup bundle to $TESTTMP/.hg/strip-backup/ea22a388757c-e51094db-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/7892795b8e38-3fb46217-amend.hg (glob)
$ hg book
- book1 1:6cec5aa930e2
- * book2 1:6cec5aa930e2
+ book1 1:8311f17e2616
+ * book2 1:8311f17e2616
$ echo a >> a
$ hg ci --amend -m 'move bookmarks'
- saved backup bundle to $TESTTMP/.hg/strip-backup/6cec5aa930e2-e9b06de4-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/8311f17e2616-f0504fe3-amend.hg (glob)
$ hg book
- book1 1:48bb6e53a15f
- * book2 1:48bb6e53a15f
+ book1 1:a3b65065808c
+ * book2 1:a3b65065808c
abort does not loose bookmarks
@@ -352,13 +353,11 @@
> __EOF__
$ echo a >> a
$ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend
- transaction abort!
- rollback completed
abort: empty commit message
[255]
$ hg book
- book1 1:48bb6e53a15f
- * book2 1:48bb6e53a15f
+ book1 1:a3b65065808c
+ * book2 1:a3b65065808c
$ hg revert -Caq
$ rm editor.sh
@@ -375,9 +374,9 @@
$ hg branch default -f
marked working directory as branch default
$ hg ci --amend -m 'back to default'
- saved backup bundle to $TESTTMP/.hg/strip-backup/8ac881fbf49d-fd962fef-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/f8339a38efe1-c18453c9-amend.hg (glob)
$ hg branches
- default 2:ce12b0b57d46
+ default 2:9c07515f2650
Close branch:
@@ -391,7 +390,7 @@
$ echo b >> b
$ hg ci -mb
$ hg ci --amend --close-branch -m 'closing branch foo'
- saved backup bundle to $TESTTMP/.hg/strip-backup/c962248fa264-6701c392-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/c962248fa264-54245dc7-amend.hg (glob)
Same thing, different code path:
@@ -400,9 +399,9 @@
reopening closed branch head 4
$ echo b >> b
$ hg ci --amend --close-branch
- saved backup bundle to $TESTTMP/.hg/strip-backup/027371728205-49c0c55d-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/027371728205-b900d9fa-amend.hg (glob)
$ hg branches
- default 2:ce12b0b57d46
+ default 2:9c07515f2650
Refuse to amend during a merge:
@@ -421,7 +420,7 @@
$ hg ci -m 'b -> c'
$ hg mv c d
$ hg ci --amend -m 'b -> d'
- saved backup bundle to $TESTTMP/.hg/strip-backup/b8c6eac7f12e-adaaa8b1-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/42f3f27a067d-f23cc9f7-amend.hg (glob)
$ hg st --rev '.^' --copies d
A d
b
@@ -429,7 +428,7 @@
$ hg ci -m 'e = d'
$ hg cp e f
$ hg ci --amend -m 'f = d'
- saved backup bundle to $TESTTMP/.hg/strip-backup/7f9761d65613-d37aa788-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/9198f73182d5-251d584a-amend.hg (glob)
$ hg st --rev '.^' --copies f
A f
d
@@ -440,7 +439,7 @@
$ hg cp a f
$ mv f.orig f
$ hg ci --amend -m replacef
- saved backup bundle to $TESTTMP/.hg/strip-backup/9e8c5f7e3d95-90259f67-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/f0993ab6b482-eda301bf-amend.hg (glob)
$ hg st --change . --copies
$ hg log -r . --template "{file_copies}\n"
@@ -452,7 +451,7 @@
adding g
$ hg mv g h
$ hg ci --amend
- saved backup bundle to $TESTTMP/.hg/strip-backup/24aa8eacce2b-7059e0f1-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/58585e3f095c-0f5ebcda-amend.hg (glob)
$ hg st --change . --copies h
A h
$ hg log -r . --template "{file_copies}\n"
@@ -472,11 +471,11 @@
$ echo a >> a
$ hg ci -ma
$ hg ci --amend -m "a'"
- saved backup bundle to $TESTTMP/.hg/strip-backup/3837aa2a2fdb-2be01fd1-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/39a162f1d65e-9dfe13d8-amend.hg (glob)
$ hg log -r . --template "{branch}\n"
a
$ hg ci --amend -m "a''"
- saved backup bundle to $TESTTMP/.hg/strip-backup/c05c06be7514-ed28c4cd-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/d5ca7b1ac72b-0b4c1a34-amend.hg (glob)
$ hg log -r . --template "{branch}\n"
a
@@ -493,9 +492,9 @@
$ hg graft 12
grafting 12:2647734878ef "fork" (tip)
$ hg ci --amend -m 'graft amend'
- saved backup bundle to $TESTTMP/.hg/strip-backup/bd010aea3f39-eedb103b-amend.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/fe8c6f7957ca-25638666-amend.hg (glob)
$ hg log -r . --debug | grep extra
- extra: amend_source=bd010aea3f39f3fb2a2f884b9ccb0471cd77398e
+ extra: amend_source=fe8c6f7957ca1665ed77496ed7a07657d469ac60
extra: branch=a
extra: source=2647734878ef0236dda712fae9c1651cf694ea8a
@@ -520,7 +519,7 @@
$ cat >> $HGRCPATH << EOF
> [experimental]
- > evolution=createmarkers,allowunstable
+ > stabilization=createmarkers,allowunstable
> EOF
Amend with no files changes
@@ -531,26 +530,26 @@
$ hg id -n
14
$ hg log -Gl 3 --style=compact
- @ 14[tip]:11 b650e6ee8614 1970-01-01 00:00 +0000 test
+ @ 14[tip]:11 682950e85999 1970-01-01 00:00 +0000 test
| babar
|
| o 12:0 2647734878ef 1970-01-01 00:00 +0000 test
| | fork
| ~
- o 11 3334b7925910 1970-01-01 00:00 +0000 test
+ o 11 0ddb275cfad1 1970-01-01 00:00 +0000 test
| a''
~
$ hg log -Gl 4 --hidden --style=compact
- @ 14[tip]:11 b650e6ee8614 1970-01-01 00:00 +0000 test
+ @ 14[tip]:11 682950e85999 1970-01-01 00:00 +0000 test
| babar
|
- | x 13:11 68ff8ff97044 1970-01-01 00:00 +0000 test
+ | x 13:11 5167600b0f7a 1970-01-01 00:00 +0000 test
|/ amend for phase
|
| o 12:0 2647734878ef 1970-01-01 00:00 +0000 test
| | fork
| ~
- o 11 3334b7925910 1970-01-01 00:00 +0000 test
+ o 11 0ddb275cfad1 1970-01-01 00:00 +0000 test
| a''
~
@@ -562,23 +561,23 @@
$ echo 'babar' >> a
$ hg commit --amend
$ hg log -Gl 6 --hidden --style=compact
- @ 16[tip]:11 9f9e9bccf56c 1970-01-01 00:00 +0000 test
+ @ 15[tip]:11 a5b42b49b0d5 1970-01-01 00:00 +0000 test
| babar
|
- | x 15 90fef497c56f 1970-01-01 00:00 +0000 test
- | | temporary amend commit for b650e6ee8614
- | |
- | x 14:11 b650e6ee8614 1970-01-01 00:00 +0000 test
+ | x 14:11 682950e85999 1970-01-01 00:00 +0000 test
|/ babar
|
- | x 13:11 68ff8ff97044 1970-01-01 00:00 +0000 test
+ | x 13:11 5167600b0f7a 1970-01-01 00:00 +0000 test
|/ amend for phase
|
| o 12:0 2647734878ef 1970-01-01 00:00 +0000 test
| | fork
| ~
- o 11 3334b7925910 1970-01-01 00:00 +0000 test
+ o 11 0ddb275cfad1 1970-01-01 00:00 +0000 test
| a''
+ |
+ o 10 5fa75032e226 1970-01-01 00:00 +0000 test
+ | g
~
@@ -586,12 +585,12 @@
---------------------------------------------------------------------
$ hg id -r 14 --hidden
- b650e6ee8614 (a)
+ 682950e85999 (a)
$ hg revert -ar 14 --hidden
reverting a
$ hg commit --amend
$ hg id
- b99e5df575f7 (a) tip
+ 37973c7e0b61 (a) tip
Test that rewriting leaving instability behind is allowed
---------------------------------------------------------------------
@@ -600,17 +599,17 @@
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ echo 'b' >> a
$ hg log --style compact -r 'children(.)'
- 18[tip]:11 b99e5df575f7 1970-01-01 00:00 +0000 test
+ 16[tip]:11 37973c7e0b61 1970-01-01 00:00 +0000 test
babar
$ hg commit --amend
- $ hg log -r 'unstable()'
- changeset: 18:b99e5df575f7
+ $ hg log -r 'orphan()'
+ changeset: 16:37973c7e0b61
branch: a
- parent: 11:3334b7925910
+ parent: 11:0ddb275cfad1
user: test
date: Thu Jan 01 00:00:00 1970 +0000
- trouble: unstable
+ instability: orphan
summary: babar
@@ -635,10 +634,10 @@
(no more unresolved files)
$ hg ci -m 'merge bar'
$ hg log --config diff.git=1 -pr .
- changeset: 23:163cfd7219f7
+ changeset: 20:163cfd7219f7
tag: tip
- parent: 22:30d96aeaf27b
- parent: 21:1aa437659d19
+ parent: 19:30d96aeaf27b
+ parent: 18:1aa437659d19
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: merge bar
@@ -668,10 +667,10 @@
$ HGEDITOR="sh .hg/checkeditform.sh" hg ci --amend -m 'merge bar (amend message)' --edit
HGEDITFORM=commit.amend.merge
$ hg log --config diff.git=1 -pr .
- changeset: 24:bca52d4ed186
+ changeset: 21:bca52d4ed186
tag: tip
- parent: 22:30d96aeaf27b
- parent: 21:1aa437659d19
+ parent: 19:30d96aeaf27b
+ parent: 18:1aa437659d19
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: merge bar (amend message)
@@ -701,10 +700,10 @@
$ hg mv zz z
$ hg ci --amend -m 'merge bar (undo rename)'
$ hg log --config diff.git=1 -pr .
- changeset: 26:12594a98ca3f
+ changeset: 22:12594a98ca3f
tag: tip
- parent: 22:30d96aeaf27b
- parent: 21:1aa437659d19
+ parent: 19:30d96aeaf27b
+ parent: 18:1aa437659d19
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: merge bar (undo rename)
@@ -737,10 +736,10 @@
$ echo aa >> aaa
$ hg ci -m 'merge bar again'
$ hg log --config diff.git=1 -pr .
- changeset: 28:dffde028b388
+ changeset: 24:dffde028b388
tag: tip
- parent: 26:12594a98ca3f
- parent: 27:4c94d5bc65f5
+ parent: 22:12594a98ca3f
+ parent: 23:4c94d5bc65f5
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: merge bar again
@@ -772,10 +771,10 @@
$ hg mv aaa aa
$ hg ci --amend -m 'merge bar again (undo rename)'
$ hg log --config diff.git=1 -pr .
- changeset: 30:18e3ba160489
+ changeset: 25:18e3ba160489
tag: tip
- parent: 26:12594a98ca3f
- parent: 27:4c94d5bc65f5
+ parent: 22:12594a98ca3f
+ parent: 23:4c94d5bc65f5
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: merge bar again (undo rename)
@@ -814,10 +813,10 @@
use (c)hanged version, (d)elete, or leave (u)nresolved? c
$ hg ci -m 'merge bar (with conflicts)'
$ hg log --config diff.git=1 -pr .
- changeset: 33:b4c3035e2544
+ changeset: 28:b4c3035e2544
tag: tip
- parent: 32:4b216ca5ba97
- parent: 31:67db8847a540
+ parent: 27:4b216ca5ba97
+ parent: 26:67db8847a540
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: merge bar (with conflicts)
@@ -826,10 +825,10 @@
$ hg rm aa
$ hg ci --amend -m 'merge bar (with conflicts, amended)'
$ hg log --config diff.git=1 -pr .
- changeset: 35:1205ed810051
+ changeset: 29:1205ed810051
tag: tip
- parent: 32:4b216ca5ba97
- parent: 31:67db8847a540
+ parent: 27:4b216ca5ba97
+ parent: 26:67db8847a540
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: merge bar (with conflicts, amended)
@@ -870,12 +869,12 @@
---------------------------------------------------------------------
$ hg phase '.^::.'
- 35: draft
- 36: draft
+ 29: draft
+ 30: draft
$ hg commit --amend --secret -m 'amend as secret' -q
$ hg phase '.^::.'
- 35: draft
- 38: secret
+ 29: draft
+ 31: secret
Test that amend with --edit invokes editor forcibly
---------------------------------------------------
@@ -1065,12 +1064,12 @@
o 0 a0
-The way mercurial does amends is to create a temporary commit (rev 3) and then
-fold the new and old commits together into another commit (rev 4). During this
-process, _findlimit is called to check how far back to look for the transitive
-closure of file copy information, but due to the divergence of the filelog
-and changelog graph topologies, before _findlimit was fixed, it returned a rev
-which was not far enough back in this case.
+The way mercurial does amends is by folding the working copy and old commit
+together into another commit (rev 3). During this process, _findlimit is called
+to check how far back to look for the transitive closure of file copy
+information, but due to the divergence of the filelog and changelog graph
+topologies, before _findlimit was fixed, it returned a rev which was not far
+enough back in this case.
$ hg mv a1 a2
$ hg status --copies --rev 0
A a2
@@ -1078,7 +1077,7 @@
R a0
$ hg ci --amend -q
$ hg log -G --template '{rev} {desc}'
- @ 4 a1-amend
+ @ 3 a1-amend
|
| o 1 a1
|/
@@ -1161,10 +1160,10 @@
$ hg ci --amend -m "chmod amended"
$ hg ci --amend -m "chmod amended second time"
$ hg log -p --git -r .
- changeset: 8:b1326f52dddf
+ changeset: 7:b1326f52dddf
branch: newdirname
tag: tip
- parent: 5:7fd235f7cb2f
+ parent: 4:7fd235f7cb2f
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: chmod amended second time
@@ -1174,3 +1173,96 @@
new mode 100755
#endif
+
+Test amend with file inclusion options
+--------------------------------------
+
+These tests ensure that we are always amending some files that were part of the
+pre-amend commit. We want to test that the remaining files in the pre-amend
+commit were not changed in the amended commit. We do so by performing a diff of
+the amended commit against its parent commit.
+ $ cd ..
+ $ hg init testfileinclusions
+ $ cd testfileinclusions
+ $ echo a > a
+ $ echo b > b
+ $ hg commit -Aqm "Adding a and b"
+
+Only add changes to a particular file
+ $ echo a >> a
+ $ echo b >> b
+ $ hg commit --amend -I a
+ $ hg diff --git -r null -r .
+ diff --git a/a b/a
+ new file mode 100644
+ --- /dev/null
+ +++ b/a
+ @@ -0,0 +1,2 @@
+ +a
+ +a
+ diff --git a/b b/b
+ new file mode 100644
+ --- /dev/null
+ +++ b/b
+ @@ -0,0 +1,1 @@
+ +b
+
+ $ echo a >> a
+ $ hg commit --amend b
+ $ hg diff --git -r null -r .
+ diff --git a/a b/a
+ new file mode 100644
+ --- /dev/null
+ +++ b/a
+ @@ -0,0 +1,2 @@
+ +a
+ +a
+ diff --git a/b b/b
+ new file mode 100644
+ --- /dev/null
+ +++ b/b
+ @@ -0,0 +1,2 @@
+ +b
+ +b
+
+Exclude changes to a particular file
+ $ echo b >> b
+ $ hg commit --amend -X a
+ $ hg diff --git -r null -r .
+ diff --git a/a b/a
+ new file mode 100644
+ --- /dev/null
+ +++ b/a
+ @@ -0,0 +1,2 @@
+ +a
+ +a
+ diff --git a/b b/b
+ new file mode 100644
+ --- /dev/null
+ +++ b/b
+ @@ -0,0 +1,3 @@
+ +b
+ +b
+ +b
+
+Check the addremove flag
+ $ echo c > c
+ $ rm a
+ $ hg commit --amend -A
+ removing a
+ adding c
+ $ hg diff --git -r null -r .
+ diff --git a/b b/b
+ new file mode 100644
+ --- /dev/null
+ +++ b/b
+ @@ -0,0 +1,3 @@
+ +b
+ +b
+ +b
+ diff --git a/c b/c
+ new file mode 100644
+ --- /dev/null
+ +++ b/c
+ @@ -0,0 +1,1 @@
+ +c
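A condensed sketch of the file-selection behaviour the new test block above exercises; the file name and the comments are illustrative summaries, not taken verbatim from the test:

  $ hg commit --amend -I a    # fold only changes matching the pattern 'a' into the amended commit
  $ hg commit --amend -X a    # fold everything except changes matching the pattern 'a'
  $ hg commit --amend -A      # run addremove first, so unknown files are added and missing files removed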
--- a/tests/test-commit-interactive-curses.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-commit-interactive-curses.t Sat Sep 30 07:52:48 2017 -0700
@@ -206,7 +206,7 @@
> X
> EOF
$ hg commit -i -m "newly added file" -d "0 0"
- saved backup bundle to $TESTTMP/a/.hg/strip-backup/2b0e9be4d336-28bbe4e2-amend.hg (glob)
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/2b0e9be4d336-3cf0bc8c-amend.hg (glob)
$ hg diff -c .
diff -r a6735021574d -r c1d239d165ae x
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -345,7 +345,7 @@
> $PYTHON <<EOF
> from mercurial import hg, ui;\
> repo = hg.repository(ui.ui.load(), ".");\
- > print repo.ui.interface("chunkselector")
+ > print(repo.ui.interface("chunkselector"))
> EOF
> }
$ chunkselectorinterface
--- a/tests/test-commit-multiple.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-commit-multiple.t Sat Sep 30 07:52:48 2017 -0700
@@ -90,7 +90,7 @@
> f.close()
>
> def printfiles(repo, rev):
- > print "revision %s files: %s" % (rev, repo[rev].files())
+ > print("revision %s files: %s" % (rev, repo[rev].files()))
>
> repo = hg.repository(ui.ui.load(), '.')
> assert len(repo) == 6, \
@@ -99,14 +99,14 @@
> replacebyte("bugfix", "u")
> sleep(2)
> try:
- > print "PRE: len(repo): %d" % len(repo)
+ > print("PRE: len(repo): %d" % len(repo))
> wlock = repo.wlock()
> lock = repo.lock()
> replacebyte("file1", "x")
> repo.commit(text="x", user="test", date=(0, 0))
> replacebyte("file1", "y")
> repo.commit(text="y", user="test", date=(0, 0))
- > print "POST: len(repo): %d" % len(repo)
+ > print("POST: len(repo): %d" % len(repo))
> finally:
> lock.release()
> wlock.release()
--- a/tests/test-commit.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-commit.t Sat Sep 30 07:52:48 2017 -0700
@@ -642,9 +642,10 @@
verify pathauditor blocks evil filepaths
$ cat > evil-commit.py <<EOF
- > from mercurial import ui, hg, context, node
+ > from __future__ import absolute_import
+ > from mercurial import context, hg, node, ui as uimod
> notrc = u".h\u200cg".encode('utf-8') + '/hgrc'
- > u = ui.ui.load()
+ > u = uimod.ui.load()
> r = hg.repository(u, '.')
> def filectxfn(repo, memctx, path):
> return context.memfilectx(repo, path, '[hooks]\nupdate = echo owned')
@@ -666,9 +667,10 @@
$ hg rollback -f
repository tip rolled back to revision 2 (undo commit)
$ cat > evil-commit.py <<EOF
- > from mercurial import ui, hg, context, node
+ > from __future__ import absolute_import
+ > from mercurial import context, hg, node, ui as uimod
> notrc = "HG~1/hgrc"
- > u = ui.ui.load()
+ > u = uimod.ui.load()
> r = hg.repository(u, '.')
> def filectxfn(repo, memctx, path):
> return context.memfilectx(repo, path, '[hooks]\nupdate = echo owned')
@@ -684,9 +686,10 @@
$ hg rollback -f
repository tip rolled back to revision 2 (undo commit)
$ cat > evil-commit.py <<EOF
- > from mercurial import ui, hg, context, node
+ > from __future__ import absolute_import
+ > from mercurial import context, hg, node, ui as uimod
> notrc = "HG8B6C~2/hgrc"
- > u = ui.ui.load()
+ > u = uimod.ui.load()
> r = hg.repository(u, '.')
> def filectxfn(repo, memctx, path):
> return context.memfilectx(repo, path, '[hooks]\nupdate = echo owned')
--- a/tests/test-completion.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-completion.t Sat Sep 30 07:52:48 2017 -0700
@@ -218,17 +218,17 @@
Show all commands + options
$ hg debugcommands
add: include, exclude, subrepos, dry-run
- annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, include, exclude, template
+ annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
clone: noupdate, updaterev, rev, branch, pull, uncompressed, ssh, remotecmd, insecure
commit: addremove, close-branch, amend, secret, edit, interactive, include, exclude, message, logfile, date, user, subrepos
- diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, root, include, exclude, subrepos
+ diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
export: output, switch-parent, rev, text, git, binary, nodates
forget: include, exclude
init: ssh, remotecmd, insecure
log: follow, follow-first, date, copies, keyword, rev, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
merge: force, rev, preview, tool
pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
- push: force, rev, bookmark, branch, new-branch, ssh, remotecmd, insecure
+ push: force, rev, bookmark, branch, new-branch, pushvars, ssh, remotecmd, insecure
remove: after, force, subrepos, include, exclude
serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, subrepos
status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
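The updated completion output above advertises two new flags; a hedged sketch of how they might be invoked (the file, revision, and variable names are made up, and --pushvars additionally assumes a server configured to accept push variables):

  $ hg diff --ignore-space-at-eol -r tip
  $ hg annotate --ignore-space-at-eol somefile
  $ hg push --pushvars "DEBUG=1"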
--- a/tests/test-conflict.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-conflict.t Sat Sep 30 07:52:48 2017 -0700
@@ -44,6 +44,23 @@
$ hg id
618808747361+c0c68e4fe667+ tip
+ $ echo "[commands]" >> $HGRCPATH
+ $ echo "status.verbose=true" >> $HGRCPATH
+ $ hg status
+ M a
+ ? a.orig
+ # The repository is in an unfinished *merge* state.
+
+ # Unresolved merge conflicts:
+ #
+ # a
+ #
+ # To mark files as resolved: hg resolve --mark FILE
+
+ # To continue: hg commit
+ # To abort: hg update --clean . (warning: this will discard uncommitted changes)
+
+
$ cat a
Small Mathematical Series.
1
@@ -58,7 +75,7 @@
>>>>>>> merge rev: c0c68e4fe667 - test: branch1
Hop we are done.
- $ hg status
+ $ hg status --config commands.status.verbose=0
M a
? a.orig
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-context-metadata.t Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,50 @@
+Tests about metadataonlyctx
+
+ $ hg init
+ $ echo A > A
+ $ hg commit -A A -m 'Add A'
+ $ echo B > B
+ $ hg commit -A B -m 'Add B'
+ $ hg rm A
+ $ echo C > C
+ $ echo B2 > B
+ $ hg add C -q
+ $ hg commit -m 'Remove A'
+
+ $ cat > metaedit.py <<EOF
+ > from __future__ import absolute_import
+ > from mercurial import context, registrar
+ > cmdtable = {}
+ > command = registrar.command(cmdtable)
+ > @command('metaedit')
+ > def metaedit(ui, repo, arg):
+ > # Modify commit message to "FOO"
+ > with repo.wlock(), repo.lock(), repo.transaction('metaedit'):
+ > old = repo['.']
+ > kwargs = dict(s.split('=', 1) for s in arg.split(';'))
+ > if 'parents' in kwargs:
+ > kwargs['parents'] = kwargs['parents'].split(',')
+ > new = context.metadataonlyctx(repo, old, **kwargs)
+ > new.commit()
+ > EOF
+ $ hg --config extensions.metaedit=$TESTTMP/metaedit.py metaedit 'text=Changed'
+ $ hg log -r tip
+ changeset: 3:ad83e9e00ec9
+ tag: tip
+ parent: 1:3afb7afe6632
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: Changed
+
+ $ hg --config extensions.metaedit=$TESTTMP/metaedit.py metaedit 'parents=0' 2>&1 | egrep '^RuntimeError'
+ RuntimeError: can't reuse the manifest: its p1 doesn't match the new ctx p1
+
+ $ hg --config extensions.metaedit=$TESTTMP/metaedit.py metaedit 'user=foo <foo@example.com>'
+ $ hg log -r tip
+ changeset: 4:1f86eaeca92b
+ tag: tip
+ parent: 1:3afb7afe6632
+ user: foo <foo@example.com>
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: Remove A
+
--- a/tests/test-context.py Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-context.py Sat Sep 30 07:52:48 2017 -0700
@@ -178,3 +178,14 @@
print('data mismatch')
except Exception as ex:
print('cannot read data: %r' % ex)
+
+with repo.wlock(), repo.lock(), repo.transaction('test'):
+ with open(b'4', 'wb') as f:
+ f.write(b'4')
+ repo.dirstate.normal('4')
+ repo.commit('4')
+ revsbefore = len(repo.changelog)
+ repo.invalidate(clearfilecache=True)
+ revsafter = len(repo.changelog)
+ if revsbefore != revsafter:
+ print('changeset lost by repo.invalidate()')
--- a/tests/test-contrib-check-code.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-contrib-check-code.t Sat Sep 30 07:52:48 2017 -0700
@@ -213,32 +213,32 @@
[1]
$ cat > ./map-inside-gettext.py <<EOF
- > print _("map inside gettext %s" % v)
+ > print(_("map inside gettext %s" % v))
>
- > print _("concatenating " " by " " space %s" % v)
- > print _("concatenating " + " by " + " '+' %s" % v)
+ > print(_("concatenating " " by " " space %s" % v))
+ > print(_("concatenating " + " by " + " '+' %s" % v))
>
- > print _("mapping operation in different line %s"
- > % v)
+ > print(_("mapping operation in different line %s"
+ > % v))
>
- > print _(
- > "leading spaces inside of '(' %s" % v)
+ > print(_(
+ > "leading spaces inside of '(' %s" % v))
> EOF
$ "$check_code" ./map-inside-gettext.py
./map-inside-gettext.py:1:
- > print _("map inside gettext %s" % v)
+ > print(_("map inside gettext %s" % v))
don't use % inside _()
./map-inside-gettext.py:3:
- > print _("concatenating " " by " " space %s" % v)
+ > print(_("concatenating " " by " " space %s" % v))
don't use % inside _()
./map-inside-gettext.py:4:
- > print _("concatenating " + " by " + " '+' %s" % v)
+ > print(_("concatenating " + " by " + " '+' %s" % v))
don't use % inside _()
./map-inside-gettext.py:6:
- > print _("mapping operation in different line %s"
+ > print(_("mapping operation in different line %s"
don't use % inside _()
./map-inside-gettext.py:9:
- > print _(
+ > print(_(
don't use % inside _()
[1]
--- a/tests/test-convert-clonebranches.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-convert-clonebranches.t Sat Sep 30 07:52:48 2017 -0700
@@ -31,7 +31,9 @@
Miss perl... sometimes
$ cat > filter.py <<EOF
- > import sys, re
+ > from __future__ import absolute_import
+ > import re
+ > import sys
>
> r = re.compile(r'^(?:\d+|pulling from)')
> sys.stdout.writelines([l for l in sys.stdin if r.search(l)])
--- a/tests/test-convert-cvs.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-convert-cvs.t Sat Sep 30 07:52:48 2017 -0700
@@ -12,10 +12,10 @@
$ echo "convert = " >> $HGRCPATH
$ cat > cvshooks.py <<EOF
> def cvslog(ui,repo,hooktype,log):
- > print "%s hook: %d entries"%(hooktype,len(log))
+ > print("%s hook: %d entries"%(hooktype,len(log)))
>
> def cvschangesets(ui,repo,hooktype,changesets):
- > print "%s hook: %d changesets"%(hooktype,len(changesets))
+ > print("%s hook: %d changesets"%(hooktype,len(changesets)))
> EOF
$ hookpath=`pwd`
$ cat <<EOF >> $HGRCPATH
--- a/tests/test-copy-move-merge.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-copy-move-merge.t Sat Sep 30 07:52:48 2017 -0700
@@ -1,3 +1,6 @@
+Test for the full copytracing algorithm
+=======================================
+
$ hg init t
$ cd t
@@ -81,7 +84,7 @@
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
saved backup bundle to $TESTTMP/t/.hg/strip-backup/550bd84c0cd3-fc575957-backup.hg (glob)
$ hg up -qC 2
- $ hg rebase --keep -d 1 -b 2 --config extensions.rebase= --config experimental.disablecopytrace=True --config ui.interactive=True << EOF
+ $ hg rebase --keep -d 1 -b 2 --config extensions.rebase= --config experimental.copytrace=off --config ui.interactive=True << EOF
> c
> EOF
rebasing 2:add3f11052fa "other" (tip)
@@ -117,7 +120,7 @@
|
o 0 add a
- $ hg rebase -d . -b 2 --config extensions.rebase= --config experimental.disablecopytrace=True
+ $ hg rebase -d . -b 2 --config extensions.rebase= --config experimental.copytrace=off
rebasing 2:6adcf8c12e7d "copy b->x"
saved backup bundle to $TESTTMP/copydisable/.hg/strip-backup/6adcf8c12e7d-ce4b3e75-rebase.hg (glob)
$ hg up -q 3
@@ -150,7 +153,7 @@
|/
o 0 add a
- $ hg rebase -d 2 -s 3 --config extensions.rebase= --config experimental.disablecopytrace=True
+ $ hg rebase -d 2 -s 3 --config extensions.rebase= --config experimental.copytrace=off
rebasing 3:47e1a9e6273b "copy a->b (2)" (tip)
saved backup bundle to $TESTTMP/copydisable3/.hg/strip-backup/47e1a9e6273b-2d099c59-rebase.hg (glob)
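The hunks above replace the old experimental.disablecopytrace boolean with the new experimental.copytrace option; a minimal sketch of the equivalent hgrc spelling for turning copy tracing off (the 'heuristics' value is the one exercised by the new test-copytrace-heuristics.t file further below):

  [experimental]
  copytrace = off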
--- a/tests/test-copy.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-copy.t Sat Sep 30 07:52:48 2017 -0700
@@ -15,7 +15,7 @@
$ hg status
$ hg copy a b
$ hg --config ui.portablefilenames=abort copy a con.xml
- abort: filename contains 'con', which is reserved on Windows: 'con.xml'
+ abort: filename contains 'con', which is reserved on Windows: con.xml
[255]
$ hg status
A b
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-copytrace-heuristics.t Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,714 @@
+Test for the heuristic copytracing algorithm
+============================================
+
+ $ cat >> $TESTTMP/copytrace.sh << '__EOF__'
+ > initclient() {
+ > cat >> $1/.hg/hgrc <<EOF
+ > [experimental]
+ > copytrace = heuristics
+ > EOF
+ > }
+ > __EOF__
+ $ . "$TESTTMP/copytrace.sh"
+
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > rebase=
+ > shelve=
+ > EOF
+
+Check filename heuristics (same dirname and same basename)
+ $ hg init server
+ $ cd server
+ $ echo a > a
+ $ mkdir dir
+ $ echo a > dir/file.txt
+ $ hg addremove
+ adding a
+ adding dir/file.txt
+ $ hg ci -m initial
+ $ hg mv a b
+ $ hg mv -q dir dir2
+ $ hg ci -m 'mv a b, mv dir/ dir2/'
+ $ cd ..
+ $ hg clone -q server repo
+ $ initclient repo
+ $ cd repo
+ $ hg up -q 0
+ $ echo b > a
+ $ echo b > dir/file.txt
+ $ hg ci -qm 'mod a, mod dir/file.txt'
+
+ $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
+ @ changeset: 557f403c0afd2a3cf15d7e2fb1f1001a8b85e081
+ | desc: mod a, mod dir/file.txt, phase: draft
+ | o changeset: 928d74bc9110681920854d845c06959f6dfc9547
+ |/ desc: mv a b, mv dir/ dir2/, phase: public
+ o changeset: 3c482b16e54596fed340d05ffaf155f156cda7ee
+ desc: initial, phase: public
+
+ $ hg rebase -s . -d 1
+ rebasing 2:557f403c0afd "mod a, mod dir/file.txt" (tip)
+ merging b and a to b
+ merging dir2/file.txt and dir/file.txt to dir2/file.txt
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/557f403c0afd-9926eeff-rebase.hg (glob)
+ $ cd ..
+ $ rm -rf server
+ $ rm -rf repo
+
+Make sure filename heuristics do not kick in when the files are not related
+ $ hg init server
+ $ cd server
+ $ echo 'somecontent' > a
+ $ hg add a
+ $ hg ci -m initial
+ $ hg rm a
+ $ echo 'completelydifferentcontext' > b
+ $ hg add b
+ $ hg ci -m 'rm a, add b'
+ $ cd ..
+ $ hg clone -q server repo
+ $ initclient repo
+ $ cd repo
+ $ hg up -q 0
+ $ printf 'somecontent\nmoarcontent' > a
+ $ hg ci -qm 'mode a'
+
+ $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
+ @ changeset: d526312210b9e8f795d576a77dc643796384d86e
+ | desc: mode a, phase: draft
+ | o changeset: 46985f76c7e5e5123433527f5c8526806145650b
+ |/ desc: rm a, add b, phase: public
+ o changeset: e5b71fb099c29d9172ef4a23485aaffd497e4cc0
+ desc: initial, phase: public
+
+ $ hg rebase -s . -d 1
+ rebasing 2:d526312210b9 "mode a" (tip)
+ other [source] changed a which local [dest] deleted
+ use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ unresolved conflicts (see hg resolve, then hg rebase --continue)
+ [1]
+
+ $ cd ..
+ $ rm -rf server
+ $ rm -rf repo
+
+Test when the lca didn't modify the file that was moved
+ $ hg init server
+ $ cd server
+ $ echo 'somecontent' > a
+ $ hg add a
+ $ hg ci -m initial
+ $ echo c > c
+ $ hg add c
+ $ hg ci -m randomcommit
+ $ hg mv a b
+ $ hg ci -m 'mv a b'
+ $ cd ..
+ $ hg clone -q server repo
+ $ initclient repo
+ $ cd repo
+ $ hg up -q 1
+ $ echo b > a
+ $ hg ci -qm 'mod a'
+
+ $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
+ @ changeset: 9d5cf99c3d9f8e8b05ba55421f7f56530cfcf3bc
+ | desc: mod a, phase: draft
+ | o changeset: d760186dd240fc47b91eb9f0b58b0002aaeef95d
+ |/ desc: mv a b, phase: public
+ o changeset: 48e1b6ba639d5d7fb313fa7989eebabf99c9eb83
+ | desc: randomcommit, phase: public
+ o changeset: e5b71fb099c29d9172ef4a23485aaffd497e4cc0
+ desc: initial, phase: public
+
+ $ hg rebase -s . -d 2
+ rebasing 3:9d5cf99c3d9f "mod a" (tip)
+ merging b and a to b
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9d5cf99c3d9f-f02358cc-rebase.hg (glob)
+ $ cd ..
+ $ rm -rf server
+ $ rm -rf repo
+
+Rebase "backwards"
+ $ hg init server
+ $ cd server
+ $ echo 'somecontent' > a
+ $ hg add a
+ $ hg ci -m initial
+ $ echo c > c
+ $ hg add c
+ $ hg ci -m randomcommit
+ $ hg mv a b
+ $ hg ci -m 'mv a b'
+ $ cd ..
+ $ hg clone -q server repo
+ $ initclient repo
+ $ cd repo
+ $ hg up -q 2
+ $ echo b > b
+ $ hg ci -qm 'mod b'
+
+ $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
+ @ changeset: fbe97126b3969056795c462a67d93faf13e4d298
+ | desc: mod b, phase: draft
+ o changeset: d760186dd240fc47b91eb9f0b58b0002aaeef95d
+ | desc: mv a b, phase: public
+ o changeset: 48e1b6ba639d5d7fb313fa7989eebabf99c9eb83
+ | desc: randomcommit, phase: public
+ o changeset: e5b71fb099c29d9172ef4a23485aaffd497e4cc0
+ desc: initial, phase: public
+
+ $ hg rebase -s . -d 0
+ rebasing 3:fbe97126b396 "mod b" (tip)
+ merging a and b to a
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/fbe97126b396-cf5452a1-rebase.hg (glob)
+ $ cd ..
+ $ rm -rf server
+ $ rm -rf repo
+
+Rebase draft commit on top of draft commit
+ $ hg init repo
+ $ initclient repo
+ $ cd repo
+ $ echo 'somecontent' > a
+ $ hg add a
+ $ hg ci -m initial
+ $ hg mv a b
+ $ hg ci -m 'mv a b'
+ $ hg up -q ".^"
+ $ echo b > a
+ $ hg ci -qm 'mod a'
+
+ $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
+ @ changeset: 5268f05aa1684cfb5741e9eb05eddcc1c5ee7508
+ | desc: mod a, phase: draft
+ | o changeset: 542cb58df733ee48fa74729bd2cdb94c9310d362
+ |/ desc: mv a b, phase: draft
+ o changeset: e5b71fb099c29d9172ef4a23485aaffd497e4cc0
+ desc: initial, phase: draft
+
+ $ hg rebase -s . -d 1
+ rebasing 2:5268f05aa168 "mod a" (tip)
+ merging b and a to b
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/5268f05aa168-284f6515-rebase.hg (glob)
+ $ cd ..
+ $ rm -rf server
+ $ rm -rf repo
+
+Check a few potential move candidates
+ $ hg init server
+ $ initclient server
+ $ cd server
+ $ mkdir dir
+ $ echo a > dir/a
+ $ hg add dir/a
+ $ hg ci -qm initial
+ $ hg mv dir/a dir/b
+ $ hg ci -qm 'mv dir/a dir/b'
+ $ mkdir dir2
+ $ echo b > dir2/a
+ $ hg add dir2/a
+ $ hg ci -qm 'create dir2/a'
+ $ cd ..
+ $ hg clone -q server repo
+ $ initclient repo
+ $ cd repo
+ $ hg up -q 0
+ $ echo b > dir/a
+ $ hg ci -qm 'mod dir/a'
+
+ $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
+ @ changeset: 6b2f4cece40fd320f41229f23821256ffc08efea
+ | desc: mod dir/a, phase: draft
+ | o changeset: 4494bf7efd2e0dfdd388e767fb913a8a3731e3fa
+ | | desc: create dir2/a, phase: public
+ | o changeset: b1784dfab6ea6bfafeb11c0ac50a2981b0fe6ade
+ |/ desc: mv dir/a dir/b, phase: public
+ o changeset: 36859b8907c513a3a87ae34ba5b1e7eea8c20944
+ desc: initial, phase: public
+
+ $ hg rebase -s . -d 2
+ rebasing 3:6b2f4cece40f "mod dir/a" (tip)
+ merging dir/b and dir/a to dir/b
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/6b2f4cece40f-503efe60-rebase.hg (glob)
+ $ cd ..
+ $ rm -rf server
+ $ rm -rf repo
+
+Move file in one branch and delete it in another
+ $ hg init server
+ $ initclient server
+ $ cd server
+ $ echo a > a
+ $ hg add a
+ $ hg ci -m initial
+ $ cd ..
+ $ hg clone -q server repo
+ $ initclient repo
+ $ cd repo
+ $ hg mv a b
+ $ hg ci -m 'mv a b'
+ $ hg up -q ".^"
+ $ hg rm a
+ $ hg ci -m 'del a'
+ created new head
+
+ $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
+ @ changeset: 7d61ee3b1e48577891a072024968428ba465c47b
+ | desc: del a, phase: draft
+ | o changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
+ |/ desc: mv a b, phase: draft
+ o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
+ desc: initial, phase: public
+
+ $ hg rebase -s 1 -d 2
+ rebasing 1:472e38d57782 "mv a b"
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/472e38d57782-17d50e29-rebase.hg (glob)
+ $ hg up -q c492ed3c7e35dcd1dc938053b8adf56e2cfbd062
+ $ ls
+ b
+ $ cd ..
+ $ rm -rf server
+ $ rm -rf repo
+
+Move a directory in a draft branch
+ $ hg init server
+ $ initclient server
+ $ cd server
+ $ mkdir dir
+ $ echo a > dir/a
+ $ hg add dir/a
+ $ hg ci -qm initial
+ $ cd ..
+ $ hg clone -q server repo
+ $ initclient repo
+ $ cd repo
+ $ echo b > dir/a
+ $ hg ci -qm 'mod dir/a'
+ $ hg up -q ".^"
+ $ hg mv -q dir/ dir2
+ $ hg ci -qm 'mv dir/ dir2/'
+
+ $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
+ @ changeset: a33d80b6e352591dfd82784e1ad6cdd86b25a239
+ | desc: mv dir/ dir2/, phase: draft
+ | o changeset: 6b2f4cece40fd320f41229f23821256ffc08efea
+ |/ desc: mod dir/a, phase: draft
+ o changeset: 36859b8907c513a3a87ae34ba5b1e7eea8c20944
+ desc: initial, phase: public
+
+ $ hg rebase -s . -d 1
+ rebasing 2:a33d80b6e352 "mv dir/ dir2/" (tip)
+ merging dir/a and dir2/a to dir2/a
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/a33d80b6e352-fecb9ada-rebase.hg (glob)
+ $ cd ..
+ $ rm -rf server
+ $ rm -rf repo
+
+Move file twice and rebase mod on top of moves
+ $ hg init server
+ $ initclient server
+ $ cd server
+ $ echo a > a
+ $ hg add a
+ $ hg ci -m initial
+ $ hg mv a b
+ $ hg ci -m 'mv a b'
+ $ hg mv b c
+ $ hg ci -m 'mv b c'
+ $ cd ..
+ $ hg clone -q server repo
+ $ initclient repo
+ $ cd repo
+ $ hg up -q 0
+ $ echo c > a
+ $ hg ci -m 'mod a'
+ created new head
+ $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
+ @ changeset: d413169422167a3fa5275fc5d71f7dea9f5775f3
+ | desc: mod a, phase: draft
+ | o changeset: d3efd280421d24f9f229997c19e654761c942a71
+ | | desc: mv b c, phase: public
+ | o changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
+ |/ desc: mv a b, phase: public
+ o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
+ desc: initial, phase: public
+ $ hg rebase -s . -d 2
+ rebasing 3:d41316942216 "mod a" (tip)
+ merging c and a to c
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d41316942216-2b5949bc-rebase.hg (glob)
+
+ $ cd ..
+ $ rm -rf server
+ $ rm -rf repo
+
+Move file twice and rebase moves on top of mods
+ $ hg init server
+ $ initclient server
+ $ cd server
+ $ echo a > a
+ $ hg add a
+ $ hg ci -m initial
+ $ cd ..
+ $ hg clone -q server repo
+ $ initclient repo
+ $ cd repo
+ $ hg mv a b
+ $ hg ci -m 'mv a b'
+ $ hg mv b c
+ $ hg ci -m 'mv b c'
+ $ hg up -q 0
+ $ echo c > a
+ $ hg ci -m 'mod a'
+ created new head
+ $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
+ @ changeset: d413169422167a3fa5275fc5d71f7dea9f5775f3
+ | desc: mod a, phase: draft
+ | o changeset: d3efd280421d24f9f229997c19e654761c942a71
+ | | desc: mv b c, phase: draft
+ | o changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
+ |/ desc: mv a b, phase: draft
+ o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
+ desc: initial, phase: public
+ $ hg rebase -s 1 -d .
+ rebasing 1:472e38d57782 "mv a b"
+ merging a and b to b
+ rebasing 2:d3efd280421d "mv b c"
+ merging b and c to c
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/472e38d57782-ab8d3c58-rebase.hg (glob)
+
+ $ cd ..
+ $ rm -rf server
+ $ rm -rf repo
+
+Move one file and add another file in the same folder in one branch; modify the original file in the other branch
+ $ hg init server
+ $ initclient server
+ $ cd server
+ $ echo a > a
+ $ hg add a
+ $ hg ci -m initial
+ $ hg mv a b
+ $ hg ci -m 'mv a b'
+ $ echo c > c
+ $ hg add c
+ $ hg ci -m 'add c'
+ $ cd ..
+ $ hg clone -q server repo
+ $ initclient repo
+ $ cd repo
+ $ hg up -q 0
+ $ echo b > a
+ $ hg ci -m 'mod a'
+ created new head
+
+ $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
+ @ changeset: ef716627c70bf4ca0bdb623cfb0d6fe5b9acc51e
+ | desc: mod a, phase: draft
+ | o changeset: b1a6187e79fbce851bb584eadcb0cc4a80290fd9
+ | | desc: add c, phase: public
+ | o changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
+ |/ desc: mv a b, phase: public
+ o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
+ desc: initial, phase: public
+
+ $ hg rebase -s . -d 2
+ rebasing 3:ef716627c70b "mod a" (tip)
+ merging b and a to b
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/ef716627c70b-24681561-rebase.hg (glob)
+ $ ls
+ b
+ c
+ $ cat b
+ b
+
+Merge test
+ $ hg init server
+ $ cd server
+ $ echo a > a
+ $ hg add a
+ $ hg ci -m initial
+ $ echo b > a
+ $ hg ci -m 'modify a'
+ $ hg up -q 0
+ $ hg mv a b
+ $ hg ci -m 'mv a b'
+ created new head
+ $ cd ..
+ $ hg clone -q server repo
+ $ initclient repo
+ $ cd repo
+ $ hg up -q 2
+
+ $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
+ @ changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
+ | desc: mv a b, phase: public
+ | o changeset: b0357b07f79129a3d08a68621271ca1352ae8a09
+ |/ desc: modify a, phase: public
+ o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
+ desc: initial, phase: public
+
+ $ hg merge 1
+ merging b and a to b
+ 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m merge
+ $ ls
+ b
+ $ cd ..
+ $ rm -rf server
+ $ rm -rf repo
+
+Copy and move file
+ $ hg init server
+ $ initclient server
+ $ cd server
+ $ echo a > a
+ $ hg add a
+ $ hg ci -m initial
+ $ hg cp a c
+ $ hg mv a b
+ $ hg ci -m 'cp a c, mv a b'
+ $ cd ..
+ $ hg clone -q server repo
+ $ initclient repo
+ $ cd repo
+ $ hg up -q 0
+ $ echo b > a
+ $ hg ci -m 'mod a'
+ created new head
+
+ $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
+ @ changeset: ef716627c70bf4ca0bdb623cfb0d6fe5b9acc51e
+ | desc: mod a, phase: draft
+ | o changeset: 4fc3fd13fbdb89ada6b75bfcef3911a689a0dde8
+ |/ desc: cp a c, mv a b, phase: public
+ o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
+ desc: initial, phase: public
+
+ $ hg rebase -s . -d 1
+ rebasing 2:ef716627c70b "mod a" (tip)
+ merging b and a to b
+ merging c and a to c
+ saved backup bundle to $TESTTMP/repo/repo/.hg/strip-backup/ef716627c70b-24681561-rebase.hg (glob)
+ $ ls
+ b
+ c
+ $ cat b
+ b
+ $ cat c
+ b
+ $ cd ..
+ $ rm -rf server
+ $ rm -rf repo
+
+Do a merge commit with several consecutive moves in one branch
+ $ hg init server
+ $ initclient server
+ $ cd server
+ $ echo a > a
+ $ hg add a
+ $ hg ci -m initial
+ $ echo b > a
+ $ hg ci -qm 'mod a'
+ $ cd ..
+ $ hg clone -q server repo
+ $ initclient repo
+ $ cd repo
+ $ hg up -q ".^"
+ $ hg mv a b
+ $ hg ci -qm 'mv a b'
+ $ hg mv b c
+ $ hg ci -qm 'mv b c'
+ $ hg up -q 1
+ $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
+ o changeset: d3efd280421d24f9f229997c19e654761c942a71
+ | desc: mv b c, phase: draft
+ o changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
+ | desc: mv a b, phase: draft
+ | @ changeset: ef716627c70bf4ca0bdb623cfb0d6fe5b9acc51e
+ |/ desc: mod a, phase: public
+ o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
+ desc: initial, phase: public
+
+ $ hg merge 3
+ merging a and c to c
+ 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -qm 'merge'
+ $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
+ @ changeset: cd29b0d08c0f39bfed4cde1b40e30f419db0c825
+ |\ desc: merge, phase: draft
+ | o changeset: d3efd280421d24f9f229997c19e654761c942a71
+ | | desc: mv b c, phase: draft
+ | o changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
+ | | desc: mv a b, phase: draft
+ o | changeset: ef716627c70bf4ca0bdb623cfb0d6fe5b9acc51e
+ |/ desc: mod a, phase: public
+ o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
+ desc: initial, phase: public
+ $ ls
+ c
+ $ cd ..
+ $ rm -rf server
+ $ rm -rf repo
+
+Test shelve/unshelve
+ $ hg init server
+ $ initclient server
+ $ cd server
+ $ echo a > a
+ $ hg add a
+ $ hg ci -m initial
+ $ cd ..
+ $ hg clone -q server repo
+ $ initclient repo
+ $ cd repo
+ $ echo b > a
+ $ hg shelve
+ shelved as default
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg mv a b
+ $ hg ci -m 'mv a b'
+
+ $ hg log -G -T 'changeset: {node}\n desc: {desc}, phase: {phase}\n'
+ @ changeset: 472e38d57782172f6c6abed82a94ca0d998c3a22
+ | desc: mv a b, phase: draft
+ o changeset: 1451231c87572a7d3f92fc210b4b35711c949a98
+ desc: initial, phase: public
+ $ hg unshelve
+ unshelving change 'default'
+ rebasing shelved changes
+ rebasing 2:45f63161acea "changes to: initial" (tip)
+ merging b and a to b
+ $ ls
+ b
+ $ cat b
+ b
+ $ cd ..
+ $ rm -rf server
+ $ rm -rf repo
+
+Test full copytrace ability on a draft branch
+-------------------------------------------
+
+File directory and base name changed in same move
+ $ hg init repo
+ $ initclient repo
+ $ mkdir repo/dir1
+ $ cd repo/dir1
+ $ echo a > a
+ $ hg add a
+ $ hg ci -qm initial
+ $ cd ..
+ $ hg mv -q dir1 dir2
+ $ hg mv dir2/a dir2/b
+ $ hg ci -qm 'mv a b; mv dir1 dir2'
+ $ hg up -q '.^'
+ $ cd dir1
+ $ echo b >> a
+ $ cd ..
+ $ hg ci -qm 'mod a'
+
+ $ hg log -G -T 'changeset {node}\n desc {desc}, phase: {phase}\n'
+ @ changeset 6207d2d318e710b882e3d5ada2a89770efc42c96
+ | desc mod a, phase: draft
+ | o changeset abffdd4e3dfc04bc375034b970299b2a309a1cce
+ |/ desc mv a b; mv dir1 dir2, phase: draft
+ o changeset 81973cd24b58db2fdf18ce3d64fb2cc3284e9ab3
+ desc initial, phase: draft
+
+ $ hg rebase -s . -d 1
+ rebasing 2:6207d2d318e7 "mod a" (tip)
+ merging dir2/b and dir1/a to dir2/b
+ saved backup bundle to $TESTTMP/repo/repo/.hg/strip-backup/6207d2d318e7-1c9779ad-rebase.hg (glob)
+ $ cat dir2/b
+ a
+ b
+ $ cd ..
+ $ rm -rf server
+ $ rm -rf repo
+
+Move a directory in one merge parent, while adding a file to the original directory
+in the other merge parent. The added file should be moved on rebase.
+ $ hg init repo
+ $ initclient repo
+ $ mkdir repo/dir1
+ $ cd repo/dir1
+ $ echo dummy > dummy
+ $ hg add dummy
+ $ cd ..
+ $ hg ci -qm initial
+ $ cd dir1
+ $ echo a > a
+ $ hg add a
+ $ cd ..
+ $ hg ci -qm 'hg add dir1/a'
+ $ hg up -q '.^'
+ $ hg mv -q dir1 dir2
+ $ hg ci -qm 'mv dir1 dir2'
+
+ $ hg log -G -T 'changeset {node}\n desc {desc}, phase: {phase}\n'
+ @ changeset e8919e7df8d036e07b906045eddcd4a42ff1915f
+ | desc mv dir1 dir2, phase: draft
+ | o changeset 7c7c6f339be00f849c3cb2df738ca91db78b32c8
+ |/ desc hg add dir1/a, phase: draft
+ o changeset a235dcce55dcf42034c4e374cb200662d0bb4a13
+ desc initial, phase: draft
+
+ $ hg rebase -s . -d 1
+ rebasing 2:e8919e7df8d0 "mv dir1 dir2" (tip)
+ saved backup bundle to $TESTTMP/repo/repo/.hg/strip-backup/e8919e7df8d0-f62fab62-rebase.hg (glob)
+ $ ls dir2
+ a
+ dummy
+ $ rm -rf server
+ $ rm -rf repo
+
+Testing the sourcecommitlimit config
+
+ $ hg init repo
+ $ initclient repo
+ $ cd repo
+ $ echo a > a
+ $ hg ci -Aqm "added a"
+ $ echo "more things" >> a
+ $ hg ci -qm "added more things to a"
+ $ hg up 0
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo b > b
+ $ hg ci -Aqm "added b"
+ $ mkdir foo
+ $ hg mv a foo/bar
+ $ hg ci -m "Moved a to foo/bar"
+ $ hg log -G -T 'changeset {node}\n desc {desc}, phase: {phase}\n'
+ @ changeset b4b0f7880e500b5c364a5f07b4a2b167de7a6fb0
+ | desc Moved a to foo/bar, phase: draft
+ o changeset 5f6d8a4bf34ab274ccc9f631c2536964b8a3666d
+ | desc added b, phase: draft
+ | o changeset 8b6e13696c38e8445a759516474640c2f8dddef6
+ |/ desc added more things to a, phase: draft
+ o changeset 9092f1db7931481f93b37d5c9fbcfc341bcd7318
+ desc added a, phase: draft
+
+When the sourcecommitlimit is smaller than the number of draft commits involved, only the heuristics are used
+
+ $ hg rebase -s 8b6e13696 -d . --config experimental.copytrace.sourcecommitlimit=0
+ rebasing 1:8b6e13696c38 "added more things to a"
+ other [source] changed a which local [dest] deleted
+ use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
+ unresolved conflicts (see hg resolve, then hg rebase --continue)
+ [1]
+
+But when sourcecommitlimit is greater than the number of draft commits from the
+base to c1, full copytracing is used
+
+ $ hg rebase --abort
+ rebase aborted
+ $ hg rebase -s 8b6e13696 -d .
+ rebasing 1:8b6e13696c38 "added more things to a"
+ merging foo/bar and a to foo/bar
+ saved backup bundle to $TESTTMP/repo/repo/repo/.hg/strip-backup/8b6e13696c38-fc14ac83-rebase.hg (glob)
+ $ cd ..
+ $ rm -rf repo
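A minimal sketch of pinning the behaviour tested above in a repository's hgrc instead of passing --config on each command; the limit value of 100 is illustrative and not taken from the test:

  [experimental]
  copytrace = heuristics
  copytrace.sourcecommitlimit = 100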
--- a/tests/test-debugbundle.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-debugbundle.t Sat Sep 30 07:52:48 2017 -0700
@@ -31,7 +31,7 @@
$ hg debugbundle bundle2.hg
Stream params: {}
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '2')])"
+ changegroup -- {nbchanges: 2, version: 02}
0e067c57feba1a5694ca4844f05588bb1bf82342
991a3460af53952d10ec8a295d3d2cc2e5fa9690
@@ -56,7 +56,7 @@
$ hg debugbundle --all bundle2.hg
Stream params: {}
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '2')])"
+ changegroup -- {nbchanges: 2, version: 02}
format: id, p1, p2, cset, delta base, len(delta)
changelog
--- a/tests/test-debugcommands.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-debugcommands.t Sat Sep 30 07:52:48 2017 -0700
@@ -132,25 +132,27 @@
Test internal debugstacktrace command
$ cat > debugstacktrace.py << EOF
- > from mercurial.util import debugstacktrace, dst, sys
+ > from __future__ import absolute_import
+ > import sys
+ > from mercurial import util
> def f():
- > debugstacktrace(f=sys.stdout)
+ > util.debugstacktrace(f=sys.stdout)
> g()
> def g():
- > dst('hello from g\\n', skip=1)
+ > util.dst('hello from g\\n', skip=1)
> h()
> def h():
- > dst('hi ...\\nfrom h hidden in g', 1, depth=2)
+ > util.dst('hi ...\\nfrom h hidden in g', 1, depth=2)
> f()
> EOF
$ $PYTHON debugstacktrace.py
stacktrace at:
- debugstacktrace.py:10 in * (glob)
- debugstacktrace.py:3 in f
+ debugstacktrace.py:12 in * (glob)
+ debugstacktrace.py:5 in f
hello from g at:
- debugstacktrace.py:10 in * (glob)
- debugstacktrace.py:4 in f
+ debugstacktrace.py:12 in * (glob)
+ debugstacktrace.py:6 in f
hi ...
from h hidden in g at:
- debugstacktrace.py:4 in f
- debugstacktrace.py:7 in g
+ debugstacktrace.py:6 in f
+ debugstacktrace.py:9 in g
--- a/tests/test-demandimport.py Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-demandimport.py Sat Sep 30 07:52:48 2017 -0700
@@ -1,4 +1,4 @@
-from __future__ import print_function
+from __future__ import absolute_import, print_function
from mercurial import demandimport
demandimport.enable()
--- a/tests/test-devel-warnings.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-devel-warnings.t Sat Sep 30 07:52:48 2017 -0700
@@ -242,8 +242,8 @@
> EOF
$ hg --config "extensions.buggyconfig=${TESTTMP}/buggyconfig.py" buggyconfig
- devel-warn: extension 'buggyconfig' overwrite config item 'ui.interactive' at: */mercurial/extensions.py:* (loadall) (glob)
- devel-warn: extension 'buggyconfig' overwrite config item 'ui.quiet' at: */mercurial/extensions.py:* (loadall) (glob)
+ devel-warn: extension 'buggyconfig' overwrite config item 'ui.interactive' at: */mercurial/extensions.py:* (_loadextra) (glob)
+ devel-warn: extension 'buggyconfig' overwrite config item 'ui.quiet' at: */mercurial/extensions.py:* (_loadextra) (glob)
devel-warn: specifying a default value for a registered config item: 'ui.quiet' 'False' at: $TESTTMP/buggyconfig.py:* (cmdbuggyconfig) (glob)
devel-warn: specifying a default value for a registered config item: 'ui.interactive' 'None' at: $TESTTMP/buggyconfig.py:* (cmdbuggyconfig) (glob)
devel-warn: specifying a default value for a registered config item: 'test.some' 'foo' at: $TESTTMP/buggyconfig.py:* (cmdbuggyconfig) (glob)
--- a/tests/test-diff-ignore-whitespace.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-diff-ignore-whitespace.t Sat Sep 30 07:52:48 2017 -0700
@@ -407,8 +407,23 @@
+goodbye\r (no-eol) (esc)
world
+Test \r (carriage return) as used in "DOS" line endings:
+
+ $ printf 'hello world \r\n\t\ngoodbye world\n' >foo
+
+ $ hg ndiff --ignore-space-at-eol
+ diff -r 540c40a65b78 foo
+ --- a/foo
+ +++ b/foo
+ @@ -1,2 +1,3 @@
+ hello world
+ +\t (esc)
+ goodbye world
+
No completely blank lines to ignore:
+ $ printf 'hello world\r\n\r\ngoodbye\rworld\n' >foo
+
$ hg ndiff --ignore-blank-lines
diff -r 540c40a65b78 foo
--- a/foo
--- a/tests/test-dirstate.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-dirstate.t Sat Sep 30 07:52:48 2017 -0700
@@ -66,7 +66,12 @@
coherent (issue4353)
$ cat > ../dirstateexception.py <<EOF
- > from mercurial import merge, extensions, error
+ > from __future__ import absolute_import
+ > from mercurial import (
+ > error,
+ > extensions,
+ > merge,
+ > )
>
> def wraprecordupdates(orig, repo, actions, branchmerge):
> raise error.Abort("simulated error while recording dirstateupdates")
--- a/tests/test-doctest.py Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-doctest.py Sat Sep 30 07:52:48 2017 -0700
@@ -4,6 +4,7 @@
import doctest
import os
+import re
import sys
ispy3 = (sys.version_info[0] >= 3)
@@ -11,15 +12,36 @@
if 'TERM' in os.environ:
del os.environ['TERM']
+class py3docchecker(doctest.OutputChecker):
+ def check_output(self, want, got, optionflags):
+ want2 = re.sub(r'''\bu(['"])(.*?)\1''', r'\1\2\1', want) # py2: u''
+ got2 = re.sub(r'''\bb(['"])(.*?)\1''', r'\1\2\1', got) # py3: b''
+ # py3: <exc.name>: b'<msg>' -> <name>: <msg>
+ # <exc.name>: <others> -> <name>: <others>
+ got2 = re.sub(r'''^mercurial\.\w+\.(\w+): (['"])(.*?)\2''', r'\1: \3',
+ got2, re.MULTILINE)
+ got2 = re.sub(r'^mercurial\.\w+\.(\w+): ', r'\1: ', got2, re.MULTILINE)
+ return any(doctest.OutputChecker.check_output(self, w, g, optionflags)
+ for w, g in [(want, got), (want2, got2)])
+
# TODO: migrate doctests to py3 and enable them on both versions
-def testmod(name, optionflags=0, testtarget=None, py2=True, py3=False):
+def testmod(name, optionflags=0, testtarget=None, py2=True, py3=True):
if not (not ispy3 and py2 or ispy3 and py3):
return
__import__(name)
mod = sys.modules[name]
if testtarget is not None:
mod = getattr(mod, testtarget)
- doctest.testmod(mod, optionflags=optionflags)
+
+ # minimal copy of doctest.testmod()
+ finder = doctest.DocTestFinder()
+ checker = None
+ if ispy3:
+ checker = py3docchecker()
+ runner = doctest.DocTestRunner(checker=checker, optionflags=optionflags)
+ for test in finder.find(mod, name):
+ runner.run(test)
+ runner.summarize()
testmod('mercurial.changegroup')
testmod('mercurial.changelog')
@@ -38,7 +60,7 @@
testmod('mercurial.patch')
testmod('mercurial.pathutil')
testmod('mercurial.parser')
-testmod('mercurial.pycompat', py3=True)
+testmod('mercurial.pycompat')
testmod('mercurial.revsetlang')
testmod('mercurial.smartset')
testmod('mercurial.store')
@@ -55,3 +77,5 @@
testmod('hgext.convert.p4')
testmod('hgext.convert.subversion')
testmod('hgext.mq')
+# Helper scripts in tests/ that have doctests:
+testmod('drawdag')
--- a/tests/test-drawdag.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-drawdag.t Sat Sep 30 07:52:48 2017 -0700
@@ -2,7 +2,7 @@
> [extensions]
> drawdag=$TESTDIR/drawdag.py
> [experimental]
- > evolution=all
+ > stabilization=all
> EOF
$ reinit () {
@@ -227,8 +227,46 @@
o A 426bada5c67598ca65036d57d9e4b64b0c1ce7a0
$ hg debugobsolete
- 112478962961147124edd43549aedd1a335e44bf 7fb047a69f220c21711122dfd94305a9efb60cba 64a8289d249234b9886244d379f15e6b650b28e3 711f53bbef0bebd12eb6f0511d5e2e998b984846 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- 26805aba1e600a82e93661149f2313866a221a7b be0ef73c17ade3fc89dc41701eb9fc3a91b58282 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- be0ef73c17ade3fc89dc41701eb9fc3a91b58282 575c4b5ec114d64b681d33f8792853568bfb2b2c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- 64a8289d249234b9886244d379f15e6b650b28e3 0 {7fb047a69f220c21711122dfd94305a9efb60cba} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- 58e6b987bf7045fcd9c54f496396ca1d1fc81047 0 {575c4b5ec114d64b681d33f8792853568bfb2b2c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 112478962961147124edd43549aedd1a335e44bf 7fb047a69f220c21711122dfd94305a9efb60cba 64a8289d249234b9886244d379f15e6b650b28e3 711f53bbef0bebd12eb6f0511d5e2e998b984846 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'split', 'user': 'test'}
+ 26805aba1e600a82e93661149f2313866a221a7b be0ef73c17ade3fc89dc41701eb9fc3a91b58282 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'replace', 'user': 'test'}
+ be0ef73c17ade3fc89dc41701eb9fc3a91b58282 575c4b5ec114d64b681d33f8792853568bfb2b2c 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'replace', 'user': 'test'}
+ 64a8289d249234b9886244d379f15e6b650b28e3 0 {7fb047a69f220c21711122dfd94305a9efb60cba} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'prune', 'user': 'test'}
+ 58e6b987bf7045fcd9c54f496396ca1d1fc81047 0 {575c4b5ec114d64b681d33f8792853568bfb2b2c} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'prune', 'user': 'test'}
+
+Change file contents via comments
+
+ $ reinit
+ $ hg debugdrawdag <<'EOS'
+ > C # A/dir1/a = 1\n2
+ > |\ # B/dir2/b = 34
+ > A B # C/dir1/c = 5
+ > # C/dir2/c = 6
+ > # C/A = a
+ > # C/B = b
+ > EOS
+
+ $ hg log -G -T '{desc} {files}'
+ o C A B dir1/c dir2/c
+ |\
+ | o B B dir2/b
+ |
+ o A A dir1/a
+
+ $ for f in `hg files -r C`; do
+ > echo FILE "$f"
+ > hg cat -r C "$f"
+ > echo
+ > done
+ FILE A
+ a
+ FILE B
+ b
+ FILE dir1/a (glob)
+ 1
+ 2
+ FILE dir1/c (glob)
+ 5
+ FILE dir2/b (glob)
+ 34
+ FILE dir2/c (glob)
+ 6
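For reference, a minimal sketch of the comment syntax the new drawdag test exercises; it assumes the drawdag test extension is enabled as in the hunk above, and the node names, paths, and contents are arbitrary:

  $ hg debugdrawdag <<'EOS'
  > B  # B/newfile = line1\nline2
  > |  # B/A = replacement content for A
  > A
  > EOS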
--- a/tests/test-duplicateoptions.py Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-duplicateoptions.py Sat Sep 30 07:52:48 2017 -0700
@@ -6,18 +6,18 @@
ui as uimod,
)
-ignore = {'highlight', 'win32text', 'factotum'}
+ignore = {b'highlight', b'win32text', b'factotum'}
if os.name != 'nt':
- ignore.add('win32mbcs')
+ ignore.add(b'win32mbcs')
disabled = [ext for ext in extensions.disabled().keys() if ext not in ignore]
-hgrc = open(os.environ["HGRCPATH"], 'w')
-hgrc.write('[extensions]\n')
+hgrc = open(os.environ["HGRCPATH"], 'wb')
+hgrc.write(b'[extensions]\n')
for ext in disabled:
- hgrc.write(ext + '=\n')
+ hgrc.write(ext + b'=\n')
hgrc.close()
@@ -30,7 +30,7 @@
option[0] and globalshort.add(option[0])
option[1] and globallong.add(option[1])
-for cmd, entry in commands.table.iteritems():
+for cmd, entry in commands.table.items():
seenshort = globalshort.copy()
seenlong = globallong.copy()
for option in entry[1]:
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-editor-filename.t Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,61 @@
+Test that the temp file used with an editor has the expected suffix.
+
+ $ hg init
+
+Create an editor that writes its arguments to stdout and install it as $HGEDITOR.
+
+ $ cat > editor.sh << EOF
+ > echo "\$@"
+ > exit 1
+ > EOF
+ $ hg add editor.sh
+ $ HGEDITOR="sh $TESTTMP/editor.sh"
+ $ export HGEDITOR
+
+Verify that the path for a commit editor has the expected suffix.
+
+ $ hg commit
+ *.commit.hg.txt (glob)
+ abort: edit failed: sh exited with status 1
+ [255]
+
+Verify that the path for a histedit editor has the expected suffix.
+
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > rebase=
+ > histedit=
+ > EOF
+ $ hg commit --message 'At least one commit for histedit.'
+ $ hg histedit
+ *.histedit.hg.txt (glob)
+ abort: edit failed: sh exited with status 1
+ [255]
+
+Verify that when performing an action that has the side-effect of launching an
+editor on a diff, the temporary file name ends in .diff.
+
+ $ echo 1 > one
+ $ echo 2 > two
+ $ hg add
+ adding one
+ adding two
+ $ hg commit --interactive --config ui.interactive=true --config ui.interface=text << EOF
+ > y
+ > e
+ > q
+ > EOF
+ diff --git a/one b/one
+ new file mode 100644
+ examine changes to 'one'? [Ynesfdaq?] y
+
+ @@ -0,0 +1,1 @@
+ +1
+ record change 1/2 to 'one'? [Ynesfdaq?] e
+
+ *.diff (glob)
+ editor exited with exit code 1
+ record change 1/2 to 'one'? [Ynesfdaq?] q
+
+ abort: user quit
+ [255]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-encoding-func.py Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,45 @@
+from __future__ import absolute_import
+
+import unittest
+
+from mercurial import (
+ encoding,
+)
+
+class IsasciistrTest(unittest.TestCase):
+ asciistrs = [
+ b'a',
+ b'ab',
+ b'abc',
+ b'abcd',
+ b'abcde',
+ b'abcdefghi',
+ b'abcd\0fghi',
+ ]
+
+ def testascii(self):
+ for s in self.asciistrs:
+ self.assertTrue(encoding.isasciistr(s))
+
+ def testnonasciichar(self):
+ for s in self.asciistrs:
+ for i in range(len(s)):
+ t = bytearray(s)
+ t[i] |= 0x80
+ self.assertFalse(encoding.isasciistr(bytes(t)))
+
+class LocalEncodingTest(unittest.TestCase):
+ def testasciifastpath(self):
+ s = b'\0' * 100
+ self.assertTrue(s is encoding.tolocal(s))
+ self.assertTrue(s is encoding.fromlocal(s))
+
+class Utf8bEncodingTest(unittest.TestCase):
+ def testasciifastpath(self):
+ s = b'\0' * 100
+ self.assertTrue(s is encoding.toutf8b(s))
+ self.assertTrue(s is encoding.fromutf8b(s))
+
+if __name__ == '__main__':
+ import silenttestrunner
+ silenttestrunner.main(__name__)
--- a/tests/test-eol.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-eol.t Sat Sep 30 07:52:48 2017 -0700
@@ -8,15 +8,17 @@
Set up helpers
$ cat > switch-eol.py <<EOF
+ > from __future__ import absolute_import
+ > import os
> import sys
> try:
- > import os, msvcrt
+ > import msvcrt
> msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
> msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
> except ImportError:
> pass
> (old, new) = sys.argv[1] == 'LF' and ('\n', '\r\n') or ('\r\n', '\n')
- > print "%% switching encoding from %r to %r" % (old, new)
+ > print("%% switching encoding from %r to %r" % (old, new))
> for path in sys.argv[2:]:
> data = file(path, 'rb').read()
> data = data.replace(old, new)
--- a/tests/test-extension.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-extension.t Sat Sep 30 07:52:48 2017 -0700
@@ -75,13 +75,13 @@
$ cat > foo.py <<EOF
> import os
> name = os.path.basename(__file__).rsplit('.', 1)[0]
- > print "1) %s imported" % name
+ > print("1) %s imported" % name)
> def uisetup(ui):
- > print "2) %s uisetup" % name
+ > print("2) %s uisetup" % name)
> def extsetup():
- > print "3) %s extsetup" % name
+ > print("3) %s extsetup" % name)
> def reposetup(ui, repo):
- > print "4) %s reposetup" % name
+ > print("4) %s reposetup" % name)
>
> # custom predicate to check registration of functions at loading
> from mercurial import (
@@ -172,7 +172,7 @@
$ cat > loadabs.py <<EOF
> import mod.ambigabs as ambigabs
> def extsetup():
- > print 'ambigabs.s=%s' % ambigabs.s
+ > print('ambigabs.s=%s' % ambigabs.s)
> EOF
$ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}/libroot; hg --config extensions.loadabs=loadabs.py root)
ambigabs.s=libroot/ambig.py
@@ -186,7 +186,7 @@
$ cat > loadrel.py <<EOF
> import mod.ambigrel as ambigrel
> def extsetup():
- > print 'ambigrel.s=%s' % ambigrel.s
+ > print('ambigrel.s=%s' % ambigrel.s)
> EOF
$ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}/libroot; hg --config extensions.loadrel=loadrel.py root)
ambigrel.s=libroot/mod/ambig.py
@@ -510,6 +510,53 @@
$ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}; hg --config extensions.checkrelativity=$TESTTMP/checkrelativity.py checkrelativity)
+Make sure a broken uisetup doesn't globally break hg:
+ $ cat > $TESTTMP/baduisetup.py <<EOF
+ > from mercurial import (
+ > bdiff,
+ > extensions,
+ > )
+ >
+ > def blockswrapper(orig, *args, **kwargs):
+ > return orig(*args, **kwargs)
+ >
+ > def uisetup(ui):
+ > extensions.wrapfunction(bdiff, 'blocks', blockswrapper)
+ > EOF
+
+Even though the extension fails during uisetup, hg is still basically usable:
+ $ hg --config extensions.baduisetup=$TESTTMP/baduisetup.py version
+ \*\*\* failed to set up extension baduisetup: No module named (mercurial\.)?bdiff (re)
+ Mercurial Distributed SCM (version *) (glob)
+ (see https://mercurial-scm.org for more information)
+
+ Copyright (C) 2005-2017 Matt Mackall and others
+ This is free software; see the source for copying conditions. There is NO
+ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+ $ hg --config extensions.baduisetup=$TESTTMP/baduisetup.py version --traceback
+ Traceback (most recent call last):
+ File "*/mercurial/extensions.py", line *, in _runuisetup (glob)
+ uisetup(ui)
+ File "$TESTTMP/baduisetup.py", line 10, in uisetup
+ extensions.wrapfunction(bdiff, 'blocks', blockswrapper)
+ File "*/mercurial/extensions.py", line *, in wrapfunction (glob)
+ origfn = getattr(container, funcname)
+ File "*/hgdemandimport/demandimportpy2.py", line *, in __getattr__ (glob)
+ self._load()
+ File "*/hgdemandimport/demandimportpy2.py", line *, in _load (glob)
+ mod = _hgextimport(_origimport, head, globals, locals, None, level)
+ File "*/hgdemandimport/demandimportpy2.py", line *, in _hgextimport (glob)
+ return importfunc(name, globals, *args, **kwargs)
+ ImportError: No module named (mercurial\.)?bdiff (re)
+ \*\*\* failed to set up extension baduisetup: No module named (mercurial\.)?bdiff (re)
+ Mercurial Distributed SCM (version *) (glob)
+ (see https://mercurial-scm.org for more information)
+
+ Copyright (C) 2005-2017 Matt Mackall and others
+ This is free software; see the source for copying conditions. There is NO
+ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
#endif
$ cd ..
@@ -1663,53 +1710,25 @@
devel-warn: cmdutil.command is deprecated, use registrar.command to register 'foo'
(compatibility will be dropped after Mercurial-4.6, update your code.) * (glob)
-Make sure a broken uisetup doesn't globally break hg:
- $ cat > $TESTTMP/baduisetup.py <<EOF
- > from mercurial import (
- > bdiff,
- > extensions,
- > )
- >
- > def blockswrapper(orig, *args, **kwargs):
- > return orig(*args, **kwargs)
- >
- > def uisetup(ui):
- > extensions.wrapfunction(bdiff, 'blocks', blockswrapper)
- > EOF
- $ cat >> $HGRCPATH <<EOF
- > [extensions]
- > baduisetup = $TESTTMP/baduisetup.py
- > EOF
+Prohibit the use of unicode strings as the default value of options
+
+ $ hg init $TESTTMP/opt-unicode-default
-Even though the extension fails during uisetup, hg is still basically usable:
- $ hg version
- \*\*\* failed to set up extension baduisetup: No module named (mercurial\.)?bdiff (re)
- Mercurial Distributed SCM (version *) (glob)
- (see https://mercurial-scm.org for more information)
-
- Copyright (C) 2005-2017 Matt Mackall and others
- This is free software; see the source for copying conditions. There is NO
- warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
- $ hg version --traceback
- Traceback (most recent call last):
- File "*/mercurial/extensions.py", line *, in _runuisetup (glob)
- uisetup(ui)
- File "$TESTTMP/baduisetup.py", line 10, in uisetup
- extensions.wrapfunction(bdiff, 'blocks', blockswrapper)
- File "*/mercurial/extensions.py", line *, in wrapfunction (glob)
- origfn = getattr(container, funcname)
- File "*/hgdemandimport/demandimportpy2.py", line *, in __getattr__ (glob)
- self._load()
- File "*/hgdemandimport/demandimportpy2.py", line *, in _load (glob)
- mod = _hgextimport(_origimport, head, globals, locals, None, level)
- File "*/hgdemandimport/demandimportpy2.py", line *, in _hgextimport (glob)
- return importfunc(name, globals, *args, **kwargs)
- ImportError: No module named (mercurial\.)?bdiff (re)
- \*\*\* failed to set up extension baduisetup: No module named (mercurial\.)?bdiff (re)
- Mercurial Distributed SCM (version *) (glob)
- (see https://mercurial-scm.org for more information)
-
- Copyright (C) 2005-2017 Matt Mackall and others
- This is free software; see the source for copying conditions. There is NO
- warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ $ cat > $TESTTMP/test_unicode_default_value.py << EOF
+ > from mercurial import registrar
+ > cmdtable = {}
+ > command = registrar.command(cmdtable)
+ > @command('dummy', [('', 'opt', u'value', u'help')], 'ext [OPTIONS]')
+ > def ext(*args, **opts):
+ > print(opts['opt'])
+ > EOF
+ $ cat > $TESTTMP/opt-unicode-default/.hg/hgrc << EOF
+ > [extensions]
+ > test_unicode_default_value = $TESTTMP/test_unicode_default_value.py
+ > EOF
+ $ hg -R $TESTTMP/opt-unicode-default dummy
+ *** failed to import extension test_unicode_default_value from $TESTTMP/test_unicode_default_value.py: option 'dummy.opt' has a unicode default value
+ *** (change the dummy.opt default value to a non-unicode string)
+ hg: unknown command 'dummy'
+ (did you mean summary?)
+ [255]
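
Illustrative sketch, not part of the patch above (the 'dummy' command name and 'opt' option are simply the test's own placeholders): the loader rejects the u'value' default and the hint asks for a non-unicode string, so a version of the same extension that loads cleanly would look roughly like this:

    from __future__ import absolute_import
    from mercurial import registrar

    cmdtable = {}
    command = registrar.command(cmdtable)

    # Option tuple: (short name, long name, default, help text). Using a plain
    # native string for the default (and, by convention, the help text) avoids
    # the "has a unicode default value" error shown above.
    @command('dummy', [('', 'opt', 'value', 'help')], 'hg dummy [OPTIONS]')
    def dummy(ui, repo, **opts):
        ui.write('%s\n' % opts['opt'])
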
--- a/tests/test-extensions-wrapfunction.py Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-extensions-wrapfunction.py Sat Sep 30 07:52:48 2017 -0700
@@ -37,3 +37,28 @@
batchwrap(wrappers + [wrappers[0]])
batchunwrap([(wrappers[i] if i >= 0 else None)
for i in [3, None, 0, 4, 0, 2, 1, None]])
+
+wrap0 = extensions.wrappedfunction(dummy, 'getstack', wrappers[0])
+wrap1 = extensions.wrappedfunction(dummy, 'getstack', wrappers[1])
+
+# Use them in a different order from how they were created to check that
+# the wrapping happens in __enter__, not in __init__
+print('context manager', dummy.getstack())
+with wrap1:
+ print('context manager', dummy.getstack())
+ with wrap0:
+ print('context manager', dummy.getstack())
+ # Bad programmer forgets to unwrap the function, but the context
+ # managers still unwrap their wrappings.
+ extensions.wrapfunction(dummy, 'getstack', wrappers[2])
+ print('context manager', dummy.getstack())
+ print('context manager', dummy.getstack())
+print('context manager', dummy.getstack())
+
+# Wrap callable object which has no __name__
+class callableobj(object):
+ def __call__(self):
+ return ['orig']
+dummy.cobj = callableobj()
+extensions.wrapfunction(dummy, 'cobj', wrappers[0])
+print('wrap callable object', dummy.cobj())
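
Illustrative sketch, not part of the patch (os.getcwd is an arbitrary stand-in; the test above wraps its own dummy module): the wrappedfunction() form exercised here scopes a wrapper to a with-block, installing it in __enter__ and removing it in __exit__, whereas wrapfunction() stays in effect until explicitly unwrapped:

    import os

    from mercurial import extensions

    def tracingwrapper(orig, *args, **kwargs):
        # Delegate to the original implementation; a real wrapper could log
        # timings or adjust arguments before calling through.
        return orig(*args, **kwargs)

    # The wrapper is only active inside the with-block.
    with extensions.wrappedfunction(os, 'getcwd', tracingwrapper):
        os.getcwd()
    # Here the original, unwrapped os.getcwd is back in place.
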
--- a/tests/test-extensions-wrapfunction.py.out Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-extensions-wrapfunction.py.out Sat Sep 30 07:52:48 2017 -0700
@@ -12,3 +12,10 @@
unwrap 2: 2: [1, 'orig']
unwrap 1: 1: ['orig']
unwrap -: -: IndexError
+context manager ['orig']
+context manager [1, 'orig']
+context manager [0, 1, 'orig']
+context manager [2, 0, 1, 'orig']
+context manager [2, 1, 'orig']
+context manager [2, 'orig']
+wrap callable object [0, 'orig']
--- a/tests/test-filebranch.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-filebranch.t Sat Sep 30 07:52:48 2017 -0700
@@ -2,8 +2,9 @@
when we do a merge.
$ cat <<EOF > merge
+ > from __future__ import print_function
> import sys, os
- > print "merging for", os.path.basename(sys.argv[1])
+ > print("merging for", os.path.basename(sys.argv[1]))
> EOF
$ HGMERGE="$PYTHON ../merge"; export HGMERGE
--- a/tests/test-fileset.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-fileset.t Sat Sep 30 07:52:48 2017 -0700
@@ -17,20 +17,20 @@
Test operators and basic patterns
$ fileset -v a1
- ('symbol', 'a1')
+ (symbol 'a1')
a1
$ fileset -v 'a*'
- ('symbol', 'a*')
+ (symbol 'a*')
a1
a2
$ fileset -v '"re:a\d"'
- ('string', 're:a\\d')
+ (string 're:a\\d')
a1
a2
$ fileset -v 'a1 or a2'
(or
- ('symbol', 'a1')
- ('symbol', 'a2'))
+ (symbol 'a1')
+ (symbol 'a2'))
a1
a2
$ fileset 'a1 | a2'
--- a/tests/test-fncache.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-fncache.t Sat Sep 30 07:52:48 2017 -0700
@@ -205,9 +205,9 @@
Aborting lock does not prevent fncache writes
$ cat > exceptionext.py <<EOF
+ > from __future__ import absolute_import
> import os
- > from mercurial import commands, error
- > from mercurial.extensions import wrapcommand, wrapfunction
+ > from mercurial import commands, error, extensions
>
> def lockexception(orig, vfs, lockname, wait, releasefn, *args, **kwargs):
> def releasewrap():
@@ -217,7 +217,7 @@
> return l
>
> def reposetup(ui, repo):
- > wrapfunction(repo, '_lock', lockexception)
+ > extensions.wrapfunction(repo, '_lock', lockexception)
>
> cmdtable = {}
>
@@ -236,7 +236,7 @@
> wlock.release()
>
> def extsetup(ui):
- > wrapcommand(commands.table, "commit", commitwrap)
+ > extensions.wrapcommand(commands.table, "commit", commitwrap)
> EOF
$ extpath=`pwd`/exceptionext.py
$ hg init fncachetxn
@@ -252,9 +252,9 @@
Aborting transaction prevents fncache change
$ cat > ../exceptionext.py <<EOF
+ > from __future__ import absolute_import
> import os
- > from mercurial import commands, error, localrepo
- > from mercurial.extensions import wrapfunction
+ > from mercurial import commands, error, extensions, localrepo
>
> def wrapper(orig, self, *args, **kwargs):
> tr = orig(self, *args, **kwargs)
@@ -265,7 +265,8 @@
> return tr
>
> def uisetup(ui):
- > wrapfunction(localrepo.localrepository, 'transaction', wrapper)
+ > extensions.wrapfunction(
+ > localrepo.localrepository, 'transaction', wrapper)
>
> cmdtable = {}
>
@@ -287,9 +288,15 @@
Aborted transactions can be recovered later
$ cat > ../exceptionext.py <<EOF
+ > from __future__ import absolute_import
> import os
- > from mercurial import commands, error, transaction, localrepo
- > from mercurial.extensions import wrapfunction
+ > from mercurial import (
+ > commands,
+ > error,
+ > extensions,
+ > localrepo,
+ > transaction,
+ > )
>
> def trwrapper(orig, self, *args, **kwargs):
> tr = orig(self, *args, **kwargs)
@@ -303,8 +310,10 @@
> raise error.Abort("forced transaction failure")
>
> def uisetup(ui):
- > wrapfunction(localrepo.localrepository, 'transaction', trwrapper)
- > wrapfunction(transaction.transaction, '_abort', abortwrapper)
+ > extensions.wrapfunction(localrepo.localrepository, 'transaction',
+ > trwrapper)
+ > extensions.wrapfunction(transaction.transaction, '_abort',
+ > abortwrapper)
>
> cmdtable = {}
>
--- a/tests/test-generaldelta.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-generaldelta.t Sat Sep 30 07:52:48 2017 -0700
@@ -154,10 +154,10 @@
0 files updated, 0 files merged, 5 files removed, 0 files unresolved
saved backup bundle to $TESTTMP/aggressive/.hg/strip-backup/1c5d4dc9a8b8-6c68e60c-backup.hg (glob)
$ hg debugbundle .hg/strip-backup/*
- Stream params: sortdict([('Compression', 'BZ')])
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 1, version: 02}
1c5d4dc9a8b8d6e1750966d343e94db665e7a1e9
- phase-heads -- 'sortdict()'
+ phase-heads -- {}
1c5d4dc9a8b8d6e1750966d343e94db665e7a1e9 draft
$ cd ..
--- a/tests/test-getbundle.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-getbundle.t Sat Sep 30 07:52:48 2017 -0700
@@ -170,7 +170,7 @@
$ hg debuggetbundle repo bundle -t bundle2
$ hg debugbundle bundle
Stream params: {}
- changegroup -- "sortdict([('version', '01')])"
+ changegroup -- {version: 01}
7704483d56b2a7b5db54dcee7c62378ac629b348
29a4d1f17bd3f0779ca0525bebb1cfb51067c738
713346a995c363120712aed1aee7e04afd867638
--- a/tests/test-glog.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-glog.t Sat Sep 30 07:52:48 2017 -0700
@@ -82,7 +82,13 @@
> }
$ cat > printrevset.py <<EOF
- > from mercurial import extensions, revsetlang, commands, cmdutil
+ > from __future__ import absolute_import
+ > from mercurial import (
+ > cmdutil,
+ > commands,
+ > extensions,
+ > revsetlang,
+ > )
>
> def uisetup(ui):
> def printrevset(orig, ui, repo, *pats, **opts):
@@ -1457,11 +1463,11 @@
(or
(list
(func
- ('symbol', 'user')
- ('string', 'test'))
+ (symbol 'user')
+ (string 'test'))
(func
- ('symbol', 'user')
- ('string', 'not-a-user'))))))
+ (symbol 'user')
+ (string 'not-a-user'))))))
$ testlog -b not-a-branch
abort: unknown revision 'not-a-branch'!
abort: unknown revision 'not-a-branch'!
@@ -1473,14 +1479,14 @@
(or
(list
(func
- ('symbol', 'branch')
- ('string', 'default'))
+ (symbol 'branch')
+ (string 'default'))
(func
- ('symbol', 'branch')
- ('string', 'branch'))
+ (symbol 'branch')
+ (string 'branch'))
(func
- ('symbol', 'branch')
- ('string', 'branch'))))))
+ (symbol 'branch')
+ (string 'branch'))))))
$ testlog -k expand -k merge
[]
(group
@@ -1488,30 +1494,30 @@
(or
(list
(func
- ('symbol', 'keyword')
- ('string', 'expand'))
+ (symbol 'keyword')
+ (string 'expand'))
(func
- ('symbol', 'keyword')
- ('string', 'merge'))))))
+ (symbol 'keyword')
+ (string 'merge'))))))
$ testlog --only-merges
[]
(group
(func
- ('symbol', 'merge')
+ (symbol 'merge')
None))
$ testlog --no-merges
[]
(group
(not
(func
- ('symbol', 'merge')
+ (symbol 'merge')
None)))
$ testlog --date '2 0 to 4 0'
[]
(group
(func
- ('symbol', 'date')
- ('string', '2 0 to 4 0')))
+ (symbol 'date')
+ (string '2 0 to 4 0')))
$ hg log -G -d 'brace ) in a date'
hg: parse error: invalid date: 'brace ) in a date'
[255]
@@ -1524,18 +1530,18 @@
(group
(or
(list
- ('string', '31')
+ (string '31')
(func
- ('symbol', 'ancestors')
- ('string', '31'))))))
+ (symbol 'ancestors')
+ (string '31'))))))
(not
(group
(or
(list
- ('string', '32')
+ (string '32')
(func
- ('symbol', 'ancestors')
- ('string', '32')))))))))
+ (symbol 'ancestors')
+ (string '32')))))))))
Dedicated repo for --follow and paths filtering. The g is crafted to
have 2 filelog topological heads in a linear changeset graph.
@@ -1585,8 +1591,8 @@
(group
(group
(func
- ('symbol', 'filelog')
- ('string', 'a'))))
+ (symbol 'filelog')
+ (string 'a'))))
$ testlog a b
[]
(group
@@ -1594,11 +1600,11 @@
(or
(list
(func
- ('symbol', 'filelog')
- ('string', 'a'))
+ (symbol 'filelog')
+ (string 'a'))
(func
- ('symbol', 'filelog')
- ('string', 'b'))))))
+ (symbol 'filelog')
+ (string 'b'))))))
Test falling back to slow path for non-existing files
@@ -1606,12 +1612,12 @@
[]
(group
(func
- ('symbol', '_matchfiles')
+ (symbol '_matchfiles')
(list
- ('string', 'r:')
- ('string', 'd:relpath')
- ('string', 'p:a')
- ('string', 'p:c'))))
+ (string 'r:')
+ (string 'd:relpath')
+ (string 'p:a')
+ (string 'p:c'))))
Test multiple --include/--exclude/paths
@@ -1619,21 +1625,21 @@
[]
(group
(func
- ('symbol', '_matchfiles')
+ (symbol '_matchfiles')
(list
- ('string', 'r:')
- ('string', 'd:relpath')
- ('string', 'p:a')
- ('string', 'p:e')
- ('string', 'i:a')
- ('string', 'i:e')
- ('string', 'x:b')
- ('string', 'x:e'))))
+ (string 'r:')
+ (string 'd:relpath')
+ (string 'p:a')
+ (string 'p:e')
+ (string 'i:a')
+ (string 'i:e')
+ (string 'x:b')
+ (string 'x:e'))))
Test glob expansion of pats
$ expandglobs=`$PYTHON -c "import mercurial.util; \
- > print mercurial.util.expandglobs and 'true' or 'false'"`
+ > print(mercurial.util.expandglobs and 'true' or 'false')"`
$ if [ $expandglobs = "true" ]; then
> testlog 'a*';
> else
@@ -1643,8 +1649,8 @@
(group
(group
(func
- ('symbol', 'filelog')
- ('string', 'aa'))))
+ (symbol 'filelog')
+ (string 'aa'))))
Test --follow on a non-existent directory
@@ -1661,14 +1667,14 @@
(group
(and
(func
- ('symbol', 'ancestors')
- ('symbol', '.'))
+ (symbol 'ancestors')
+ (symbol '.'))
(func
- ('symbol', '_matchfiles')
+ (symbol '_matchfiles')
(list
- ('string', 'r:')
- ('string', 'd:relpath')
- ('string', 'p:dir')))))
+ (string 'r:')
+ (string 'd:relpath')
+ (string 'p:dir')))))
$ hg up -q tip
Test --follow on file not in parent revision
@@ -1685,14 +1691,14 @@
(group
(and
(func
- ('symbol', 'ancestors')
- ('symbol', '.'))
+ (symbol 'ancestors')
+ (symbol '.'))
(func
- ('symbol', '_matchfiles')
+ (symbol '_matchfiles')
(list
- ('string', 'r:')
- ('string', 'd:relpath')
- ('string', 'p:glob:*')))))
+ (string 'r:')
+ (string 'd:relpath')
+ (string 'p:glob:*')))))
Test --follow on a single rename
@@ -1702,8 +1708,8 @@
(group
(group
(func
- ('symbol', 'follow')
- ('string', 'a'))))
+ (symbol 'follow')
+ (string 'a'))))
Test --follow and multiple renames
@@ -1713,8 +1719,8 @@
(group
(group
(func
- ('symbol', 'follow')
- ('string', 'e'))))
+ (symbol 'follow')
+ (string 'e'))))
Test --follow and multiple filelog heads
@@ -1724,8 +1730,8 @@
(group
(group
(func
- ('symbol', 'follow')
- ('string', 'g'))))
+ (symbol 'follow')
+ (string 'g'))))
$ cat log.nodes
nodetag 2
nodetag 1
@@ -1736,8 +1742,8 @@
(group
(group
(func
- ('symbol', 'follow')
- ('string', 'g'))))
+ (symbol 'follow')
+ (string 'g'))))
$ cat log.nodes
nodetag 3
nodetag 2
@@ -1752,11 +1758,11 @@
(or
(list
(func
- ('symbol', 'follow')
- ('string', 'g'))
+ (symbol 'follow')
+ (string 'g'))
(func
- ('symbol', 'follow')
- ('string', 'e'))))))
+ (symbol 'follow')
+ (string 'e'))))))
$ cat log.nodes
nodetag 4
nodetag 3
@@ -1786,10 +1792,10 @@
[]
(group
(func
- ('symbol', '_firstancestors')
+ (symbol '_firstancestors')
(func
- ('symbol', 'rev')
- ('symbol', '6'))))
+ (symbol 'rev')
+ (symbol '6'))))
Cannot compare with log --follow-first FILE as it never worked
@@ -1798,8 +1804,8 @@
(group
(group
(func
- ('symbol', '_followfirst')
- ('string', 'e'))))
+ (symbol '_followfirst')
+ (string 'e'))))
$ hg log -G --follow-first e --template '{rev} {desc|firstline}\n'
@ 6 merge 5 and 4
|\
@@ -1833,20 +1839,20 @@
[]
(group
(func
- ('symbol', '_matchfiles')
+ (symbol '_matchfiles')
(list
- ('string', 'r:')
- ('string', 'd:relpath')
- ('string', 'p:set:copied()'))))
+ (string 'r:')
+ (string 'd:relpath')
+ (string 'p:set:copied()'))))
$ testlog --include "set:copied()"
[]
(group
(func
- ('symbol', '_matchfiles')
+ (symbol '_matchfiles')
(list
- ('string', 'r:')
- ('string', 'd:relpath')
- ('string', 'i:set:copied()'))))
+ (string 'r:')
+ (string 'd:relpath')
+ (string 'i:set:copied()'))))
$ testlog -r "sort(file('set:copied()'), -rev)"
["sort(file('set:copied()'), -rev)"]
[]
@@ -1860,24 +1866,24 @@
[]
(group
(func
- ('symbol', '_matchfiles')
+ (symbol '_matchfiles')
(list
- ('string', 'r:')
- ('string', 'd:relpath')
- ('string', 'p:a'))))
+ (string 'r:')
+ (string 'd:relpath')
+ (string 'p:a'))))
$ testlog --removed --follow a
[]
(group
(and
(func
- ('symbol', 'ancestors')
- ('symbol', '.'))
+ (symbol 'ancestors')
+ (symbol '.'))
(func
- ('symbol', '_matchfiles')
+ (symbol '_matchfiles')
(list
- ('string', 'r:')
- ('string', 'd:relpath')
- ('string', 'p:a')))))
+ (string 'r:')
+ (string 'd:relpath')
+ (string 'p:a')))))
Test --patch and --stat with --follow and --follow-first
@@ -2203,10 +2209,10 @@
['6', '8', '5', '7', '4']
(group
(func
- ('symbol', 'descendants')
+ (symbol 'descendants')
(func
- ('symbol', 'rev')
- ('symbol', '6'))))
+ (symbol 'rev')
+ (symbol '6'))))
Test --follow-first and forward --rev
@@ -2214,10 +2220,10 @@
['6', '8', '5', '7', '4']
(group
(func
- ('symbol', '_firstdescendants')
+ (symbol '_firstdescendants')
(func
- ('symbol', 'rev')
- ('symbol', '6'))))
+ (symbol 'rev')
+ (symbol '6'))))
--- log.nodes * (glob)
+++ glog.nodes * (glob)
@@ -1,3 +1,3 @@
@@ -2232,10 +2238,10 @@
['6', '5', '7', '8', '4']
(group
(func
- ('symbol', 'ancestors')
+ (symbol 'ancestors')
(func
- ('symbol', 'rev')
- ('symbol', '6'))))
+ (symbol 'rev')
+ (symbol '6'))))
Test --follow-first and backward --rev
@@ -2243,10 +2249,10 @@
['6', '5', '7', '8', '4']
(group
(func
- ('symbol', '_firstancestors')
+ (symbol '_firstancestors')
(func
- ('symbol', 'rev')
- ('symbol', '6'))))
+ (symbol 'rev')
+ (symbol '6'))))
Test --follow with --rev of graphlog extension
@@ -2264,25 +2270,25 @@
[]
(group
(func
- ('symbol', '_matchfiles')
+ (symbol '_matchfiles')
(list
- ('string', 'r:')
- ('string', 'd:relpath')
- ('string', 'p:.'))))
+ (string 'r:')
+ (string 'd:relpath')
+ (string 'p:.'))))
$ testlog ../b
[]
(group
(group
(func
- ('symbol', 'filelog')
- ('string', '../b'))))
+ (symbol 'filelog')
+ (string '../b'))))
$ testlog -f ../b
[]
(group
(group
(func
- ('symbol', 'follow')
- ('string', 'b'))))
+ (symbol 'follow')
+ (string 'b'))))
$ cd ..
Test --hidden
@@ -2290,7 +2296,7 @@
$ cat >> $HGRCPATH << EOF
> [experimental]
- > evolution=createmarkers
+ > stabilization=createmarkers
> EOF
$ hg debugobsolete `hg id --debug -i -r 8`
--- a/tests/test-graft.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-graft.t Sat Sep 30 07:52:48 2017 -0700
@@ -221,6 +221,25 @@
$ hg summary |grep graft
commit: 2 modified, 2 unknown, 1 unresolved (graft in progress)
+Using status to get more context
+
+ $ hg status --verbose
+ M d
+ M e
+ ? a.orig
+ ? e.orig
+ # The repository is in an unfinished *graft* state.
+
+ # Unresolved merge conflicts:
+ #
+ # e
+ #
+ # To mark files as resolved: hg resolve --mark FILE
+
+ # To continue: hg graft --continue
+ # To abort: hg update --clean . (warning: this will discard uncommitted changes)
+
+
Commit while interrupted should fail:
$ hg ci -m 'commit interrupted graft'
--- a/tests/test-hardlinks.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-hardlinks.t Sat Sep 30 07:52:48 2017 -0700
@@ -1,11 +1,12 @@
#require hardlink
$ cat > nlinks.py <<EOF
+ > from __future__ import print_function
> import sys
> from mercurial import util
> for f in sorted(sys.stdin.readlines()):
> f = f[:-1]
- > print util.nlinks(f), f
+ > print(util.nlinks(f), f)
> EOF
$ nlinksdir()
@@ -16,8 +17,9 @@
Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux):
$ cat > linkcp.py <<EOF
+ > from __future__ import absolute_import
+ > import sys
> from mercurial import util
- > import sys
> util.copyfiles(sys.argv[1], sys.argv[2], hardlink=True)
> EOF
--- a/tests/test-help.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-help.t Sat Sep 30 07:52:48 2017 -0700
@@ -553,6 +553,7 @@
-w --ignore-all-space ignore white space when comparing lines
-b --ignore-space-change ignore changes in the amount of white space
-B --ignore-blank-lines ignore changes whose lines are all blank
+ -Z --ignore-space-at-eol ignore changes in whitespace at EOL
-U --unified NUM number of lines of context to show
--stat output diffstat-style summary of changes
--root DIR produce diffs relative to subdirectory
@@ -1516,8 +1517,8 @@
>
> This paragraph is never omitted, too (for extension)
> '''
- >
- > from mercurial import help, commands
+ > from __future__ import absolute_import
+ > from mercurial import commands, help
> testtopic = """This paragraph is never omitted (for topic).
>
> .. container:: verbose
@@ -1709,7 +1710,7 @@
$ $PYTHON <<EOF | sh
> upper = "\x8bL\x98^"
- > print "hg --encoding cp932 help -e ambiguous.%s" % upper
+ > print("hg --encoding cp932 help -e ambiguous.%s" % upper)
> EOF
\x8bL\x98^ (esc)
----
@@ -1719,7 +1720,7 @@
$ $PYTHON <<EOF | sh
> lower = "\x8bl\x98^"
- > print "hg --encoding cp932 help -e ambiguous.%s" % lower
+ > print("hg --encoding cp932 help -e ambiguous.%s" % lower)
> EOF
\x8bl\x98^ (esc)
----
--- a/tests/test-hgweb-auth.py Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-hgweb-auth.py Sat Sep 30 07:52:48 2017 -0700
@@ -25,7 +25,7 @@
def dumpdict(dict):
return '{' + ', '.join(['%s: %s' % (k, dict[k])
- for k in sorted(dict.iterkeys())]) + '}'
+ for k in sorted(dict)]) + '}'
def test(auth, urls=None):
print('CFG:', dumpdict(auth))
--- a/tests/test-hgweb-commands.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-hgweb-commands.t Sat Sep 30 07:52:48 2017 -0700
@@ -1926,7 +1926,7 @@
$ get-with-headers.py $LOCALIP:$HGPORT '?cmd=capabilities'; echo
200 Script output follows
- lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=*zlib (glob)
+ lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Aphases%3Dheads%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=*,zlib (glob)
heads
@@ -2174,7 +2174,7 @@
batch
stream-preferred
streamreqs=generaldelta,revlogv1
- bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps
+ bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Aphases%3Dheads%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps
unbundle=HG10GZ,HG10BZ,HG10UN
httpheader=1024
httpmediatype=0.1rx,0.1tx,0.2tx
--- a/tests/test-hgweb-no-path-info.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-hgweb-no-path-info.t Sat Sep 30 07:52:48 2017 -0700
@@ -31,11 +31,11 @@
> input = stringio()
>
> def startrsp(status, headers):
- > print '---- STATUS'
- > print status
- > print '---- HEADERS'
- > print [i for i in headers if i[0] != 'ETag']
- > print '---- DATA'
+ > print('---- STATUS')
+ > print(status)
+ > print('---- HEADERS')
+ > print([i for i in headers if i[0] != 'ETag'])
+ > print('---- DATA')
> return output.write
>
> env = {
@@ -59,8 +59,8 @@
> sys.stdout.write(output.getvalue())
> sys.stdout.write(''.join(content))
> getattr(content, 'close', lambda : None)()
- > print '---- ERRORS'
- > print errors.getvalue()
+ > print('---- ERRORS')
+ > print(errors.getvalue())
>
> output = stringio()
> env['QUERY_STRING'] = 'style=atom'
--- a/tests/test-hgweb-no-request-uri.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-hgweb-no-request-uri.t Sat Sep 30 07:52:48 2017 -0700
@@ -31,11 +31,11 @@
> input = stringio()
>
> def startrsp(status, headers):
- > print '---- STATUS'
- > print status
- > print '---- HEADERS'
- > print [i for i in headers if i[0] != 'ETag']
- > print '---- DATA'
+ > print('---- STATUS')
+ > print(status)
+ > print('---- HEADERS')
+ > print([i for i in headers if i[0] != 'ETag'])
+ > print('---- DATA')
> return output.write
>
> env = {
@@ -58,8 +58,8 @@
> sys.stdout.write(output.getvalue())
> sys.stdout.write(''.join(content))
> getattr(content, 'close', lambda : None)()
- > print '---- ERRORS'
- > print errors.getvalue()
+ > print('---- ERRORS')
+ > print(errors.getvalue())
>
> output = stringio()
> env['PATH_INFO'] = '/'
--- a/tests/test-hgweb-non-interactive.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-hgweb-non-interactive.t Sat Sep 30 07:52:48 2017 -0700
@@ -41,11 +41,11 @@
> output = stringio()
>
> def startrsp(status, headers):
- > print '---- STATUS'
- > print status
- > print '---- HEADERS'
- > print [i for i in headers if i[0] != 'ETag']
- > print '---- DATA'
+ > print('---- STATUS')
+ > print(status)
+ > print('---- HEADERS')
+ > print([i for i in headers if i[0] != 'ETag'])
+ > print('---- DATA')
> return output.write
>
> env = {
@@ -68,13 +68,13 @@
> i = hgweb('.')
> for c in i(env, startrsp):
> pass
- > print '---- ERRORS'
- > print errors.getvalue()
- > print '---- OS.ENVIRON wsgi variables'
- > print sorted([x for x in os.environ if x.startswith('wsgi')])
- > print '---- request.ENVIRON wsgi variables'
+ > print('---- ERRORS')
+ > print(errors.getvalue())
+ > print('---- OS.ENVIRON wsgi variables')
+ > print(sorted([x for x in os.environ if x.startswith('wsgi')]))
+ > print('---- request.ENVIRON wsgi variables')
> with i._obtainrepo() as repo:
- > print sorted([x for x in repo.ui.environ if x.startswith('wsgi')])
+ > print(sorted([x for x in repo.ui.environ if x.startswith('wsgi')]))
> EOF
$ $PYTHON request.py
---- STATUS
--- a/tests/test-highlight.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-highlight.t Sat Sep 30 07:52:48 2017 -0700
@@ -49,7 +49,7 @@
> except (ValueError, IndexError):
> n = 10
> p = primes()
- > print "The first %d primes: %s" % (n, list(islice(p, n)))
+ > print("The first %d primes: %s" % (n, list(islice(p, n))))
> EOF
$ echo >> primes.py # to test html markup with an empty line just before EOF
$ hg ci -Ama
@@ -74,7 +74,7 @@
<script type="text/javascript" src="/static/mercurial.js"></script>
<link rel="stylesheet" href="/highlightcss" type="text/css" />
- <title>test: 1af356141006 primes.py</title>
+ <title>test: f4fca47b67e6 primes.py</title>
</head>
<body>
@@ -112,7 +112,7 @@
<div class="main">
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
- view primes.py @ 0:<a href="/rev/1af356141006">1af356141006</a>
+ view primes.py @ 0:<a href="/rev/f4fca47b67e6">f4fca47b67e6</a>
<span class="tag">tip</span>
</h3>
@@ -182,7 +182,7 @@
<span id="l27"> <span class="kn">except</span> <span class="p">(</span><span class="ne">ValueError</span><span class="p">,</span> <span class="ne">IndexError</span><span class="p">):</span></span><a href="#l27"></a>
<span id="l28"> <span class="n">n</span> <span class="o">=</span> <span class="mi">10</span></span><a href="#l28"></a>
<span id="l29"> <span class="n">p</span> <span class="o">=</span> <span class="n">primes</span><span class="p">()</span></span><a href="#l29"></a>
- <span id="l30"> <span class="kn">print</span> <span class="s">"The first </span><span class="si">%d</span><span class="s"> primes: </span><span class="si">%s</span><span class="s">"</span> <span class="o">%</span> <span class="p">(</span><span class="n">n</span><span class="p">,</span> <span class="nb">list</span><span class="p">(</span><span class="n">islice</span><span class="p">(</span><span class="n">p</span><span class="p">,</span> <span class="n">n</span><span class="p">)))</span></span><a href="#l30"></a>
+ <span id="l30"> <span class="kn">print</span><span class="p">(</span><span class="s">"The first </span><span class="si">%d</span><span class="s"> primes: </span><span class="si">%s</span><span class="s">"</span> <span class="o">%</span> <span class="p">(</span><span class="n">n</span><span class="p">,</span> <span class="nb">list</span><span class="p">(</span><span class="n">islice</span><span class="p">(</span><span class="n">p</span><span class="p">,</span> <span class="n">n</span><span class="p">))))</span></span><a href="#l30"></a>
<span id="l31"></span><a href="#l31"></a>
</pre>
</div>
@@ -251,7 +251,7 @@
<div class="main">
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
- annotate primes.py @ 0:<a href="/rev/1af356141006">1af356141006</a>
+ annotate primes.py @ 0:<a href="/rev/f4fca47b67e6">f4fca47b67e6</a>
<span class="tag">tip</span>
</h3>
@@ -299,19 +299,19 @@
<tr id="l1" class="thisrev">
<td class="annotate parity0">
- <a href="/annotate/1af356141006/primes.py#l1">
+ <a href="/annotate/f4fca47b67e6/primes.py#l1">
0
</a>
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l1">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l1">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l1"> 1</a> <span class="sd">"""Fun with generators. Corresponding Haskell implementation:</span></td>
@@ -321,14 +321,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l2">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l2">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l2"> 2</a> </td>
@@ -338,14 +338,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l3">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l3">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l3"> 3</a> <span class="sd">primes = 2 : sieve [3, 5..]</span></td>
@@ -355,14 +355,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l4">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l4">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l4"> 4</a> <span class="sd"> where sieve (p:ns) = p : sieve [n | n <- ns, mod n p /= 0]</span></td>
@@ -372,14 +372,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l5">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l5">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l5"> 5</a> <span class="sd">"""</span></td>
@@ -389,14 +389,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l6">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l6">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l6"> 6</a> </td>
@@ -406,14 +406,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l7">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l7">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l7"> 7</a> <span class="kn">from</span> <span class="nn">itertools</span> <span class="kn">import</span> <span class="n">dropwhile</span><span class="p">,</span> <span class="n">ifilter</span><span class="p">,</span> <span class="n">islice</span><span class="p">,</span> <span class="n">count</span><span class="p">,</span> <span class="n">chain</span></td>
@@ -423,14 +423,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l8">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l8">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l8"> 8</a> </td>
@@ -440,14 +440,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l9">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l9">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l9"> 9</a> <span class="kn">def</span> <span class="nf">primes</span><span class="p">():</span></td>
@@ -457,14 +457,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l10">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l10">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l10"> 10</a> <span class="sd">"""Generate all primes."""</span></td>
@@ -474,14 +474,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l11">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l11">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l11"> 11</a> <span class="kn">def</span> <span class="nf">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></td>
@@ -491,14 +491,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l12">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l12">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l12"> 12</a> <span class="n">p</span> <span class="o">=</span> <span class="n">ns</span><span class="o">.</span><span class="n">next</span><span class="p">()</span></td>
@@ -508,14 +508,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l13">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l13">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l13"> 13</a> <span class="c"># It is important to yield *here* in order to stop the</span></td>
@@ -525,14 +525,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l14">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l14">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l14"> 14</a> <span class="c"># infinite recursion.</span></td>
@@ -542,14 +542,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l15">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l15">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l15"> 15</a> <span class="kn">yield</span> <span class="n">p</span></td>
@@ -559,14 +559,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l16">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l16">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l16"> 16</a> <span class="n">ns</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o">%</span> <span class="n">p</span> <span class="o">!=</span> <span class="mi">0</span><span class="p">,</span> <span class="n">ns</span><span class="p">)</span></td>
@@ -576,14 +576,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l17">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l17">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l17"> 17</a> <span class="kn">for</span> <span class="n">n</span> <span class="ow">in</span> <span class="n">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></td>
@@ -593,14 +593,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l18">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l18">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l18"> 18</a> <span class="kn">yield</span> <span class="n">n</span></td>
@@ -610,14 +610,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l19">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l19">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l19"> 19</a> </td>
@@ -627,14 +627,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l20">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l20">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l20"> 20</a> <span class="n">odds</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">i</span><span class="p">:</span> <span class="n">i</span> <span class="o">%</span> <span class="mi">2</span> <span class="o">==</span> <span class="mi">1</span><span class="p">,</span> <span class="n">count</span><span class="p">())</span></td>
@@ -644,14 +644,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l21">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l21">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l21"> 21</a> <span class="kn">return</span> <span class="n">chain</span><span class="p">([</span><span class="mi">2</span><span class="p">],</span> <span class="n">sieve</span><span class="p">(</span><span class="n">dropwhile</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o"><</span> <span class="mi">3</span><span class="p">,</span> <span class="n">odds</span><span class="p">)))</span></td>
@@ -661,14 +661,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l22">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l22">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l22"> 22</a> </td>
@@ -678,14 +678,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l23">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l23">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l23"> 23</a> <span class="kn">if</span> <span class="n">__name__</span> <span class="o">==</span> <span class="s">"__main__"</span><span class="p">:</span></td>
@@ -695,14 +695,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l24">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l24">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l24"> 24</a> <span class="kn">import</span> <span class="nn">sys</span></td>
@@ -712,14 +712,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l25">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l25">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l25"> 25</a> <span class="kn">try</span><span class="p">:</span></td>
@@ -729,14 +729,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l26">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l26">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l26"> 26</a> <span class="n">n</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mi">1</span><span class="p">])</span></td>
@@ -746,14 +746,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l27">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l27">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l27"> 27</a> <span class="kn">except</span> <span class="p">(</span><span class="ne">ValueError</span><span class="p">,</span> <span class="ne">IndexError</span><span class="p">):</span></td>
@@ -763,14 +763,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l28">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l28">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l28"> 28</a> <span class="n">n</span> <span class="o">=</span> <span class="mi">10</span></td>
@@ -780,14 +780,14 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l29">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l29">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l29"> 29</a> <span class="n">p</span> <span class="o">=</span> <span class="n">primes</span><span class="p">()</span></td>
@@ -797,31 +797,31 @@
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l30">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l30">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
- <td class="source followlines-btn-parent"><a href="#l30"> 30</a> <span class="kn">print</span> <span class="s">"The first </span><span class="si">%d</span><span class="s"> primes: </span><span class="si">%s</span><span class="s">"</span> <span class="o">%</span> <span class="p">(</span><span class="n">n</span><span class="p">,</span> <span class="nb">list</span><span class="p">(</span><span class="n">islice</span><span class="p">(</span><span class="n">p</span><span class="p">,</span> <span class="n">n</span><span class="p">)))</span></td>
+ <td class="source followlines-btn-parent"><a href="#l30"> 30</a> <span class="kn">print</span><span class="p">(</span><span class="s">"The first </span><span class="si">%d</span><span class="s"> primes: </span><span class="si">%s</span><span class="s">"</span> <span class="o">%</span> <span class="p">(</span><span class="n">n</span><span class="p">,</span> <span class="nb">list</span><span class="p">(</span><span class="n">islice</span><span class="p">(</span><span class="n">p</span><span class="p">,</span> <span class="n">n</span><span class="p">))))</span></td>
</tr>
<tr id="l31" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/1af356141006/primes.py#l31">
- 1af356141006</a>
+ <a href="/annotate/f4fca47b67e6/primes.py#l31">
+ f4fca47b67e6</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/1af356141006/primes.py">diff</a>
- <a href="/rev/1af356141006">changeset</a>
+ <a href="/diff/f4fca47b67e6/primes.py">diff</a>
+ <a href="/rev/f4fca47b67e6">changeset</a>
</div>
</td>
<td class="source followlines-btn-parent"><a href="#l31"> 31</a> </td>
--- a/tests/test-histedit-arguments.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-histedit-arguments.t Sat Sep 30 07:52:48 2017 -0700
@@ -494,7 +494,7 @@
$ cat >>$HGRCPATH <<EOF
> [experimental]
- > evolution=createmarkers,allowunstable
+ > stabilization=createmarkers,allowunstable
> EOF
$ hg commit --amend -m 'allow this fold'
$ hg histedit --continue
--- a/tests/test-histedit-edit.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-histedit-edit.t Sat Sep 30 07:52:48 2017 -0700
@@ -460,7 +460,7 @@
> EOF
$ HGEDITOR="sh ../edit.sh" hg histedit 2
warning: histedit rules saved to: .hg/histedit-last-edit.txt
- hg: parse error: cannot fold into public change 18aa70c8ad22
+ hg: parse error: first changeset cannot use verb "fold"
[255]
$ cat .hg/histedit-last-edit.txt
fold 0012be4a27ea 2 extend a
--- a/tests/test-histedit-fold.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-histedit-fold.t Sat Sep 30 07:52:48 2017 -0700
@@ -294,9 +294,21 @@
[1]
There were conflicts, we keep P1 content. This
should effectively drop the changes from +6.
- $ hg status
+
+ $ hg status -v
M file
? file.orig
+ # The repository is in an unfinished *histedit* state.
+
+ # Unresolved merge conflicts:
+ #
+ # file
+ #
+ # To mark files as resolved: hg resolve --mark FILE
+
+ # To continue: hg histedit --continue
+ # To abort: hg histedit --abort
+
$ hg resolve -l
U file
$ hg revert -r 'p1()' file
@@ -541,3 +553,36 @@
END
$ cd ..
+
+Test rolling into a commit with multiple children (issue5498)
+
+ $ hg init roll
+ $ cd roll
+ $ echo a > a
+ $ hg commit -qAm aa
+ $ echo b > b
+ $ hg commit -qAm bb
+ $ hg up -q ".^"
+ $ echo c > c
+ $ hg commit -qAm cc
+ $ hg log -G -T '{node|short} {desc}'
+ @ 5db65b93a12b cc
+ |
+ | o 301d76bdc3ae bb
+ |/
+ o 8f0162e483d0 aa
+
+
+ $ hg histedit . --commands - << EOF
+ > r 5db65b93a12b
+ > EOF
+ hg: parse error: first changeset cannot use verb "roll"
+ [255]
+ $ hg log -G -T '{node|short} {desc}'
+ @ 5db65b93a12b cc
+ |
+ | o 301d76bdc3ae bb
+ |/
+ o 8f0162e483d0 aa
+
+
--- a/tests/test-histedit-obsolete.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-histedit-obsolete.t Sat Sep 30 07:52:48 2017 -0700
@@ -8,7 +8,7 @@
> [phases]
> publish=False
> [experimental]
- > evolution=createmarkers,allowunstable
+ > stabilization=createmarkers,allowunstable
> [extensions]
> histedit=
> rebase=
@@ -43,23 +43,22 @@
$ hg commit --amend b
$ hg histedit --continue
$ hg log -G
- @ 6:46abc7c4d873 b
+ @ 5:46abc7c4d873 b
|
- o 5:49d44ab2be1b c
+ o 4:49d44ab2be1b c
|
o 0:cb9a9f314b8b a
$ hg debugobsolete
- e72d22b19f8ecf4150ab4f91d0973fd9955d3ddf 49d44ab2be1b67a79127568a67c9c99430633b48 0 (*) {'user': 'test'} (glob)
- 3e30a45cf2f719e96ab3922dfe039cfd047956ce 0 {e72d22b19f8ecf4150ab4f91d0973fd9955d3ddf} (*) {'user': 'test'} (glob)
- 1b2d564fad96311b45362f17c2aa855150efb35f 46abc7c4d8738e8563e577f7889e1b6db3da4199 0 (*) {'user': 'test'} (glob)
- 114f4176969ef342759a8a57e6bccefc4234829b 49d44ab2be1b67a79127568a67c9c99430633b48 0 (*) {'user': 'test'} (glob)
+ e72d22b19f8ecf4150ab4f91d0973fd9955d3ddf 49d44ab2be1b67a79127568a67c9c99430633b48 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ 1b2d564fad96311b45362f17c2aa855150efb35f 46abc7c4d8738e8563e577f7889e1b6db3da4199 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
+ 114f4176969ef342759a8a57e6bccefc4234829b 49d44ab2be1b67a79127568a67c9c99430633b48 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
With some node gone missing during the edit.
$ echo "pick `hg log -r 0 -T '{node|short}'`" > plan
- $ echo "pick `hg log -r 6 -T '{node|short}'`" >> plan
- $ echo "edit `hg log -r 5 -T '{node|short}'`" >> plan
+ $ echo "pick `hg log -r 5 -T '{node|short}'`" >> plan
+ $ echo "edit `hg log -r 4 -T '{node|short}'`" >> plan
$ hg histedit -r 'all()' --commands plan
Editing (49d44ab2be1b), you may commit or record as needed now.
(hg histedit --continue to resume)
@@ -73,21 +72,20 @@
$ hg --hidden --config extensions.strip= strip 'desc(XXXXXX)' --no-backup
$ hg histedit --continue
$ hg log -G
- @ 9:273c1f3b8626 c
+ @ 8:273c1f3b8626 c
|
- o 8:aba7da937030 b2
+ o 7:aba7da937030 b2
|
o 0:cb9a9f314b8b a
$ hg debugobsolete
- e72d22b19f8ecf4150ab4f91d0973fd9955d3ddf 49d44ab2be1b67a79127568a67c9c99430633b48 0 (*) {'user': 'test'} (glob)
- 3e30a45cf2f719e96ab3922dfe039cfd047956ce 0 {e72d22b19f8ecf4150ab4f91d0973fd9955d3ddf} (*) {'user': 'test'} (glob)
- 1b2d564fad96311b45362f17c2aa855150efb35f 46abc7c4d8738e8563e577f7889e1b6db3da4199 0 (*) {'user': 'test'} (glob)
- 114f4176969ef342759a8a57e6bccefc4234829b 49d44ab2be1b67a79127568a67c9c99430633b48 0 (*) {'user': 'test'} (glob)
- 76f72745eac0643d16530e56e2f86e36e40631f1 2ca853e48edbd6453a0674dc0fe28a0974c51b9c 0 (*) {'user': 'test'} (glob)
- 2ca853e48edbd6453a0674dc0fe28a0974c51b9c aba7da93703075eec9fb1dbaf143ff2bc1c49d46 0 (*) {'user': 'test'} (glob)
- 49d44ab2be1b67a79127568a67c9c99430633b48 273c1f3b86267ed3ec684bb13af1fa4d6ba56e02 0 (*) {'user': 'test'} (glob)
- 46abc7c4d8738e8563e577f7889e1b6db3da4199 aba7da93703075eec9fb1dbaf143ff2bc1c49d46 0 (*) {'user': 'test'} (glob)
+ e72d22b19f8ecf4150ab4f91d0973fd9955d3ddf 49d44ab2be1b67a79127568a67c9c99430633b48 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ 1b2d564fad96311b45362f17c2aa855150efb35f 46abc7c4d8738e8563e577f7889e1b6db3da4199 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
+ 114f4176969ef342759a8a57e6bccefc4234829b 49d44ab2be1b67a79127568a67c9c99430633b48 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
+ 76f72745eac0643d16530e56e2f86e36e40631f1 2ca853e48edbd6453a0674dc0fe28a0974c51b9c 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ 2ca853e48edbd6453a0674dc0fe28a0974c51b9c aba7da93703075eec9fb1dbaf143ff2bc1c49d46 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ 49d44ab2be1b67a79127568a67c9c99430633b48 273c1f3b86267ed3ec684bb13af1fa4d6ba56e02 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
+ 46abc7c4d8738e8563e577f7889e1b6db3da4199 aba7da93703075eec9fb1dbaf143ff2bc1c49d46 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
$ cd ..
Base setup for the rest of the testing
@@ -170,13 +168,13 @@
o 0:cb9a9f314b8b a
$ hg debugobsolete
- d2ae7f538514cd87c17547b0de4cea71fe1af9fb 0 {cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b} (*) {'user': 'test'} (glob)
- 177f92b773850b59254aa5e923436f921b55483b b346ab9a313db8537ecf96fca3ca3ca984ef3bd7 0 (*) {'user': 'test'} (glob)
- 055a42cdd88768532f9cf79daa407fc8d138de9b 59d9f330561fd6c88b1a6b32f0e45034d88db784 0 (*) {'user': 'test'} (glob)
- e860deea161a2f77de56603b340ebbb4536308ae 59d9f330561fd6c88b1a6b32f0e45034d88db784 0 (*) {'user': 'test'} (glob)
- 652413bf663ef2a641cab26574e46d5f5a64a55a cacdfd884a9321ec4e1de275ef3949fa953a1f83 0 (*) {'user': 'test'} (glob)
- 96e494a2d553dd05902ba1cee1d94d4cb7b8faed 0 {b346ab9a313db8537ecf96fca3ca3ca984ef3bd7} (*) {'user': 'test'} (glob)
- b558abc46d09c30f57ac31e85a8a3d64d2e906e4 0 {96e494a2d553dd05902ba1cee1d94d4cb7b8faed} (*) {'user': 'test'} (glob)
+ d2ae7f538514cd87c17547b0de4cea71fe1af9fb 0 {cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
+ 177f92b773850b59254aa5e923436f921b55483b b346ab9a313db8537ecf96fca3ca3ca984ef3bd7 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
+ 055a42cdd88768532f9cf79daa407fc8d138de9b 59d9f330561fd6c88b1a6b32f0e45034d88db784 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
+ e860deea161a2f77de56603b340ebbb4536308ae 59d9f330561fd6c88b1a6b32f0e45034d88db784 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
+ 652413bf663ef2a641cab26574e46d5f5a64a55a cacdfd884a9321ec4e1de275ef3949fa953a1f83 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
+ 96e494a2d553dd05902ba1cee1d94d4cb7b8faed 0 {b346ab9a313db8537ecf96fca3ca3ca984ef3bd7} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
+ b558abc46d09c30f57ac31e85a8a3d64d2e906e4 0 {96e494a2d553dd05902ba1cee1d94d4cb7b8faed} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'histedit', 'user': 'test'}
Ensure hidden revision does not prevent histedit
@@ -223,12 +221,12 @@
$ echo c >> c
$ hg histedit --continue
- $ hg log -r 'unstable()'
+ $ hg log -r 'orphan()'
11:c13eb81022ca f (no-eol)
stabilise
- $ hg rebase -r 'unstable()' -d .
+ $ hg rebase -r 'orphan()' -d .
rebasing 11:c13eb81022ca "f"
$ hg up tip -q
@@ -545,7 +543,7 @@
|
o 0:cb9a9f314b8b (public) a
- $ hg histedit -r 'b449568bf7fc' --commands - << EOF --config experimental.evolution.track-operation=1
+ $ hg histedit -r 'b449568bf7fc' --commands - << EOF --config experimental.stabilization.track-operation=1
> pick b449568bf7fc 13 f
> pick 7395e1ff83bd 15 h
> pick 6b70183d2492 14 g
@@ -556,7 +554,7 @@
Editing (ee118ab9fa44), you may commit or record as needed now.
(hg histedit --continue to resume)
[1]
- $ hg histedit --continue --config experimental.evolution.track-operation=1
+ $ hg histedit --continue --config experimental.stabilization.track-operation=1
$ hg log -G
@ 23:175d6b286a22 (secret) k
|
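
The updated test-histedit expectations above stop globbing the marker dates and pin the full `hg debugobsolete` lines, which now carry an explicit date and an 'operation' entry in the metadata. A minimal sketch of that line layout, using an illustrative regex rather than Mercurial's own parser (prune markers carrying a {parent} field are not handled here):

    # Illustrative only: rough parse of one debugobsolete line shown above
    # (precursor, successors, flags, date, metadata).
    import re

    line = ("e72d22b19f8ecf4150ab4f91d0973fd9955d3ddf "
            "49d44ab2be1b67a79127568a67c9c99430633b48 0 "
            "(Thu Jan 01 00:00:00 1970 +0000) "
            "{'operation': 'amend', 'user': 'test'}")

    m = re.match(r"^(?P<precursor>[0-9a-f]{40})"
                 r"(?P<successors>( [0-9a-f]{40})*)"
                 r" (?P<flags>\d+) \((?P<date>[^)]*)\) \{(?P<meta>.*)\}$", line)
    print(m.group('precursor'))
    print(m.group('successors').split())   # one successor hash
    print(m.group('date'))                 # Thu Jan 01 00:00:00 1970 +0000
    print(m.group('meta'))                 # 'operation': 'amend', 'user': 'test'
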
--- a/tests/test-hook.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-hook.t Sat Sep 30 07:52:48 2017 -0700
@@ -263,7 +263,6 @@
pulling from ../a
listkeys hook: HG_HOOKNAME=listkeys HG_HOOKTYPE=listkeys HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
no changes found
- listkeys hook: HG_HOOKNAME=listkeys HG_HOOKTYPE=listkeys HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
adding remote bookmark bar
$ cd ../a
@@ -409,6 +408,7 @@
$ cd "$TESTTMP/b"
$ cat > hooktests.py <<EOF
+ > from __future__ import print_function
> from mercurial import error
>
> uncallable = 0
@@ -418,9 +418,9 @@
> args.pop('repo', None)
> a = list(args.items())
> a.sort()
- > print 'hook args:'
+ > print('hook args:')
> for k, v in a:
- > print ' ', k, v
+ > print(' ', k, v)
>
> def passhook(**args):
> printargs(args)
@@ -445,7 +445,7 @@
> ui.note('verbose output from hook\n')
>
> def printtags(ui, repo, **args):
- > print sorted(repo.tags())
+ > print(sorted(repo.tags()))
>
> class container:
> unreachable = 1
@@ -630,7 +630,7 @@
$ cat > hookext.py <<EOF
> def autohook(**args):
- > print "Automatically installed hook"
+ > print("Automatically installed hook")
>
> def reposetup(ui, repo):
> repo.ui.setconfig("hooks", "commit.auto", autohook)
@@ -667,7 +667,7 @@
$ cd hooks
$ cat > testhooks.py <<EOF
> def testhook(**args):
- > print 'hook works'
+ > print('hook works')
> EOF
$ echo '[hooks]' > ../repo/.hg/hgrc
$ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc
--- a/tests/test-http-bad-server.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-http-bad-server.t Sat Sep 30 07:52:48 2017 -0700
@@ -11,6 +11,8 @@
$ cat >> $HGRCPATH << EOF
> [extensions]
> fakeversion = `pwd`/fakeversion.py
+ > [devel]
+ > legacy.exchange = phases
> EOF
$ hg init server0
@@ -68,10 +70,8 @@
$ hg --config badserver.closeafterrecvbytes=1 serve -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
-TODO this error message is not very good
-
$ hg clone http://localhost:$HGPORT/ clone
- abort: error: ''
+ abort: error: bad HTTP status line: ''
[255]
$ killdaemons.py $DAEMON_PIDS
@@ -87,7 +87,7 @@
$ hg --config badserver.closeafterrecvbytes=40 serve -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
- abort: error: ''
+ abort: error: bad HTTP status line: ''
[255]
$ killdaemons.py $DAEMON_PIDS
@@ -104,7 +104,7 @@
$ hg --config badserver.closeafterrecvbytes=210 serve -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
- abort: error: ''
+ abort: error: bad HTTP status line: ''
[255]
$ killdaemons.py $DAEMON_PIDS
@@ -143,7 +143,7 @@
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
requesting all changes
- abort: error: ''
+ abort: error: bad HTTP status line: ''
[255]
$ killdaemons.py $DAEMON_PIDS
@@ -200,7 +200,7 @@
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
- abort: error: ''
+ abort: error: bad HTTP status line: ''
[255]
$ killdaemons.py $DAEMON_PIDS
@@ -251,7 +251,7 @@
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
- abort: error: H
+ abort: error: bad HTTP status line: H
[255]
$ killdaemons.py $DAEMON_PIDS
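
For context on the expected message change above: when a server closes the connection before sending a valid status line, Python's HTTP client raises BadStatusLine (RemoteDisconnected on Python 3.5+ is a subclass of it), and the new abort text surfaces that condition instead of an empty string. A minimal sketch, assuming Python 3's http.client and a made-up helper name, not Mercurial's actual error handling:

    import http.client

    def describe_clone_failure(host, port):
        # Returns an abort-style message if the server hangs up mid-handshake.
        conn = http.client.HTTPConnection(host, port, timeout=5)
        try:
            conn.request("GET", "/?cmd=capabilities")
            conn.getresponse()
        except http.client.BadStatusLine as err:
            # err.line holds the (possibly empty) status line that was received;
            # for an empty line this renders as: bad HTTP status line: ''
            return "abort: error: bad HTTP status line: %s" % err.line
        finally:
            conn.close()
        return None
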
--- a/tests/test-http-proxy.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-http-proxy.t Sat Sep 30 07:52:48 2017 -0700
@@ -105,16 +105,16 @@
* - - [*] "GET http://localhost:$HGPORT/?cmd=branchmap HTTP/1.1" - - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
* - - [*] "GET http://localhost:$HGPORT/?cmd=stream_out HTTP/1.1" - - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
* - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D83180e7845de420a1bb46896fd5fe05294f8d629 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=83180e7845de420a1bb46896fd5fe05294f8d629&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phases%2Cbookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=83180e7845de420a1bb46896fd5fe05294f8d629&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
* - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
* - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phases%2Cbookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
* - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
* - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phases%2Cbookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
* - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
* - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phases%2Cbookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
* - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
* - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phases%2Cbookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
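
The request logs above are easier to compare once the doubly percent-encoded x-hgarg-1 value is decoded: the bundle2 capability blob gains a phases=heads entry, phases drops out of listkeys, and a separate phases=1 argument appears. A small decoding sketch (the value below is abridged from the logged line):

    from __future__ import print_function
    try:
        from urllib.parse import parse_qs, unquote   # Python 3
    except ImportError:
        from urlparse import parse_qs                 # Python 2
        from urllib import unquote

    hgarg = ("bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02"
             "%250Alistkeys%250Aphases%253Dheads%250Apushkey"
             "&cg=1&listkeys=bookmarks&phases=1")     # abridged

    args = parse_qs(hgarg)                            # first level of decoding
    caps = args["bundlecaps"][0].split(",")
    blob = [c for c in caps if c.startswith("bundle2=")][0]
    bundle2caps = unquote(blob[len("bundle2="):]).split("\n")
    print(bundle2caps)   # ['HG20', 'changegroup=01,02', 'listkeys', 'phases=heads', 'pushkey']
    print(args["phases"], args["listkeys"])           # ['1'] ['bookmarks']
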
--- a/tests/test-http.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-http.t Sat Sep 30 07:52:48 2017 -0700
@@ -284,11 +284,11 @@
"GET /?cmd=stream_out HTTP/1.1" 401 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
"GET /?cmd=stream_out HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
"GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D5fed3813f7f5e1824344fdc9cf8f63bb662c292d x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=phases%2Cbookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
"GET /?cmd=capabilities HTTP/1.1" 200 -
"GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=getbundle HTTP/1.1" 401 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=phases%2Cbookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=phases%2Cbookmarks x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=getbundle HTTP/1.1" 401 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
"GET /?cmd=capabilities HTTP/1.1" 200 -
"GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
"GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
--- a/tests/test-https.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-https.t Sat Sep 30 07:52:48 2017 -0700
@@ -624,7 +624,6 @@
$ P="$CERTSDIR" hg id https://localhost:$HGPORT/
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
- (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
abort: error: *handshake failure* (glob)
[255]
--- a/tests/test-impexp-branch.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-impexp-branch.t Sat Sep 30 07:52:48 2017 -0700
@@ -2,7 +2,9 @@
$ echo 'strip =' >> $HGRCPATH
$ cat >findbranch.py <<EOF
- > import re, sys
+ > from __future__ import absolute_import
+ > import re
+ > import sys
>
> head_re = re.compile('^#(?:(?:\\s+([A-Za-z][A-Za-z0-9_]*)(?:\\s.*)?)|(?:\\s*))$')
>
--- a/tests/test-imports-checker.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-imports-checker.t Sat Sep 30 07:52:48 2017 -0700
@@ -125,7 +125,19 @@
> from mercurial.node import hex
> EOF
- $ $PYTHON "$import_checker" testpackage*/*.py testpackage/subpackage/*.py
+# Shadowing a stdlib module to test "relative import of stdlib module" is
+# allowed if the module is also being checked
+
+ $ mkdir email
+ $ touch email/__init__.py
+ $ touch email/errors.py
+ $ cat > email/utils.py << EOF
+ > from __future__ import absolute_import
+ > from . import errors
+ > EOF
+
+ $ $PYTHON "$import_checker" testpackage*/*.py testpackage/subpackage/*.py \
+ > email/*.py
testpackage/importalias.py:2: ui module must be "as" aliased to uimod
testpackage/importfromalias.py:2: ui from testpackage must be "as" aliased to uimod
testpackage/importfromrelative.py:2: import should be relative: testpackage.unsorted
--- a/tests/test-inherit-mode.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-inherit-mode.t Sat Sep 30 07:52:48 2017 -0700
@@ -10,7 +10,9 @@
$ cd dir
$ cat >printmodes.py <<EOF
- > import os, sys
+ > from __future__ import absolute_import, print_function
+ > import os
+ > import sys
>
> allnames = []
> isdir = {}
@@ -25,13 +27,14 @@
> allnames.sort()
> for name in allnames:
> suffix = name in isdir and '/' or ''
- > print '%05o %s%s' % (os.lstat(name).st_mode & 07777, name, suffix)
+ > print('%05o %s%s' % (os.lstat(name).st_mode & 0o7777, name, suffix))
> EOF
$ cat >mode.py <<EOF
- > import sys
+ > from __future__ import absolute_import, print_function
> import os
- > print '%05o' % os.lstat(sys.argv[1]).st_mode
+ > import sys
+ > print('%05o' % os.lstat(sys.argv[1]).st_mode)
> EOF
$ umask 077
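
The helper rewrites above follow the same Python 3 porting pattern used throughout this series: __future__ imports first, one stdlib import per line, print() as a function, and 0o-prefixed octal literals. A condensed, standalone sketch of the resulting style:

    from __future__ import absolute_import, print_function
    import os
    import sys

    def printmode(path):
        # 0777 is a syntax error on Python 3; 0o777 parses on both 2.6+ and 3.x
        print('%03o' % (os.lstat(path).st_mode & 0o777))

    if __name__ == '__main__':
        printmode(sys.argv[1])
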
--- a/tests/test-install.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-install.t Sat Sep 30 07:52:48 2017 -0700
@@ -76,6 +76,19 @@
1 problems detected, please check your install!
[1]
+hg debuginstall with invalid encoding
+ $ HGENCODING=invalidenc hg debuginstall | grep encoding
+ checking encoding (invalidenc)...
+ unknown encoding: invalidenc
+
+exception message in JSON
+
+ $ HGENCODING=invalidenc HGUSER= hg debuginstall -Tjson | grep error
+ "defaulttemplateerror": null,
+ "encodingerror": "unknown encoding: invalidenc",
+ "extensionserror": null, (no-pure !)
+ "usernameerror": "no username supplied",
+
path variables are expanded (~ is the same as $TESTTMP)
$ mkdir tools
$ touch tools/testeditor.exe
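
The "unknown encoding: invalidenc" text expected above is the stock message of the LookupError that Python raises for an unrecognised codec name, which debuginstall reports both as plain text and in the JSON encodingerror field. For example:

    from __future__ import print_function
    import codecs

    try:
        codecs.lookup('invalidenc')
    except LookupError as err:
        print(err)   # unknown encoding: invalidenc
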
--- a/tests/test-issue4074.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-issue4074.t Sat Sep 30 07:52:48 2017 -0700
@@ -8,7 +8,7 @@
> print
> if random.randint(0, 100) >= 50:
> x += 1
- > print hex(x)
+ > print(hex(x))
> EOF
$ hg init a
--- a/tests/test-keyword.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-keyword.t Sat Sep 30 07:52:48 2017 -0700
@@ -260,8 +260,8 @@
adding manifests
adding file changes
added 2 changesets with 3 changes to 3 files
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Date: * (glob)
Subject: changeset in...
@@ -283,8 +283,8 @@
@@ -0,0 +1,1 @@
+a
\ No newline at end of file
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Date:* (glob)
Subject: changeset in...
@@ -1412,7 +1412,7 @@
$ mv $HGRCPATH.new $HGRCPATH
>>> from __future__ import print_function
- >>> from hgclient import readchannel, runcommand, check
+ >>> from hgclient import check, readchannel, runcommand
>>> @check
... def check(server):
... # hello block
--- a/tests/test-largefiles-cache.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-largefiles-cache.t Sat Sep 30 07:52:48 2017 -0700
@@ -94,9 +94,11 @@
$ cat > ls-l.py <<EOF
> #!$PYTHON
- > import sys, os
+ > from __future__ import absolute_import, print_function
+ > import os
+ > import sys
> path = sys.argv[1]
- > print '%03o' % (os.lstat(path).st_mode & 0777)
+ > print('%03o' % (os.lstat(path).st_mode & 0o777))
> EOF
$ chmod +x ls-l.py
--- a/tests/test-largefiles-misc.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-largefiles-misc.t Sat Sep 30 07:52:48 2017 -0700
@@ -528,13 +528,13 @@
$ echo moremore >> anotherlarge
$ hg revert anotherlarge -v --config 'ui.origbackuppath=.hg/origbackups'
creating directory: $TESTTMP/addrm2/.hg/origbackups/.hglf/sub (glob)
- saving current version of ../.hglf/sub/anotherlarge as $TESTTMP/addrm2/.hg/origbackups/.hglf/sub/anotherlarge.orig (glob)
+ saving current version of ../.hglf/sub/anotherlarge as $TESTTMP/addrm2/.hg/origbackups/.hglf/sub/anotherlarge (glob)
reverting ../.hglf/sub/anotherlarge (glob)
creating directory: $TESTTMP/addrm2/.hg/origbackups/sub (glob)
found 90c622cf65cebe75c5842f9136c459333faf392e in store
found 90c622cf65cebe75c5842f9136c459333faf392e in store
$ ls ../.hg/origbackups/sub
- anotherlarge.orig
+ anotherlarge
$ cd ..
Test glob logging from the root dir
--- a/tests/test-largefiles-small-disk.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-largefiles-small-disk.t Sat Sep 30 07:52:48 2017 -0700
@@ -1,7 +1,10 @@
Test how largefiles abort in case the disk runs full
$ cat > criple.py <<EOF
- > import os, errno, shutil
+ > from __future__ import absolute_import
+ > import errno
+ > import os
+ > import shutil
> from mercurial import util
> #
> # this makes the original largefiles code abort:
--- a/tests/test-largefiles-wireproto.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-largefiles-wireproto.t Sat Sep 30 07:52:48 2017 -0700
@@ -226,7 +226,8 @@
added 1 changesets with 1 changes to 1 files
Archive contains largefiles
- >>> import urllib2, os
+ >>> import os
+ >>> import urllib2
>>> u = 'http://localhost:%s/archive/default.zip' % os.environ['HGPORT2']
>>> with open('archive.zip', 'w') as f:
... f.write(urllib2.urlopen(u).read())
--- a/tests/test-lfconvert.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-lfconvert.t Sat Sep 30 07:52:48 2017 -0700
@@ -128,7 +128,7 @@
$ hg merge
merging sub/maybelarge.dat and stuff/maybelarge.dat to stuff/maybelarge.dat
merging sub/normal2 and stuff/normal2 to stuff/normal2
- warning: $TESTTMP/bigfile-repo/stuff/maybelarge.dat looks like a binary file. (glob)
+ warning: stuff/maybelarge.dat looks like a binary file.
warning: conflicts while merging stuff/maybelarge.dat! (edit, then use 'hg resolve --mark')
0 files updated, 1 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
@@ -326,7 +326,7 @@
$ cd largefiles-repo-hg
$ cat >> .hg/hgrc <<EOF
> [experimental]
- > evolution=createmarkers
+ > stabilization=createmarkers
> EOF
$ hg debugobsolete `hg log -r tip -T "{node}"`
obsoleted 1 changesets
--- a/tests/test-log-exthook.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-log-exthook.t Sat Sep 30 07:52:48 2017 -0700
@@ -2,8 +2,12 @@
-------------------------------------------
$ cat > $TESTTMP/logexthook.py <<EOF
- > from mercurial import repair, commands
- > from mercurial import cmdutil
+ > from __future__ import absolute_import
+ > from mercurial import (
+ > cmdutil,
+ > commands,
+ > repair,
+ > )
> def rot13description(self, ctx):
> summary = "summary".encode('rot13')
> description = ctx.description().strip().splitlines()[0].encode('rot13')
--- a/tests/test-log.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-log.t Sat Sep 30 07:52:48 2017 -0700
@@ -1700,7 +1700,7 @@
$ cat >> $HGRCPATH << EOF
> [experimental]
- > evolution=createmarkers
+ > stabilization=createmarkers
> EOF
$ hg log --template='{rev}:{node}\n'
@@ -1793,7 +1793,7 @@
$ cd problematicencoding
$ $PYTHON > setup.sh <<EOF
- > print u'''
+ > print(u'''
> echo a > text
> hg add text
> hg --encoding utf-8 commit -u '\u30A2' -m none
@@ -1803,13 +1803,13 @@
> hg --encoding utf-8 commit -u none -m '\u30A2'
> echo d > text
> hg --encoding utf-8 commit -u none -m '\u30C2'
- > '''.encode('utf-8')
+ > '''.encode('utf-8'))
> EOF
$ sh < setup.sh
test in problematic encoding
$ $PYTHON > test.sh <<EOF
- > print u'''
+ > print(u'''
> hg --encoding cp932 log --template '{rev}\\n' -u '\u30A2'
> echo ====
> hg --encoding cp932 log --template '{rev}\\n' -u '\u30C2'
@@ -1817,7 +1817,7 @@
> hg --encoding cp932 log --template '{rev}\\n' -k '\u30A2'
> echo ====
> hg --encoding cp932 log --template '{rev}\\n' -k '\u30C2'
- > '''.encode('cp932')
+ > '''.encode('cp932'))
> EOF
$ sh < test.sh
0
@@ -2027,7 +2027,8 @@
$ cat > ../names.py <<EOF
> """A small extension to test adding arbitrary names to a repo"""
- > from mercurial.namespaces import namespace
+ > from __future__ import absolute_import
+ > from mercurial import namespaces
>
> def reposetup(ui, repo):
> foo = {'foo': repo[0].node()}
@@ -2035,9 +2036,10 @@
> namemap = lambda r, name: foo.get(name)
> nodemap = lambda r, node: [name for name, n in foo.iteritems()
> if n == node]
- > ns = namespace("bars", templatename="bar", logname="barlog",
- > colorname="barcolor", listnames=names, namemap=namemap,
- > nodemap=nodemap)
+ > ns = namespaces.namespace(
+ > "bars", templatename="bar", logname="barlog",
+ > colorname="barcolor", listnames=names, namemap=namemap,
+ > nodemap=nodemap)
>
> repo.names.addnamespace(ns)
> EOF
@@ -2280,7 +2282,7 @@
$ hg init issue4490
$ cd issue4490
$ echo '[experimental]' >> .hg/hgrc
- $ echo 'evolution=createmarkers' >> .hg/hgrc
+ $ echo 'stabilization=createmarkers' >> .hg/hgrc
$ echo a > a
$ hg ci -Am0
adding a
@@ -2298,14 +2300,14 @@
$ hg up 'head() and not .'
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg log -G
- o changeset: 4:db815d6d32e6
+ o changeset: 3:db815d6d32e6
| tag: tip
| parent: 0:f7b1eb17ad24
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: 2
|
- | @ changeset: 3:9bc8ce7f9356
+ | @ changeset: 2:9bc8ce7f9356
|/ parent: 0:f7b1eb17ad24
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
@@ -2317,14 +2319,14 @@
summary: 0
$ hg log -f -G b
- @ changeset: 3:9bc8ce7f9356
+ @ changeset: 2:9bc8ce7f9356
| parent: 0:f7b1eb17ad24
~ user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: 1
$ hg log -G b
- @ changeset: 3:9bc8ce7f9356
+ @ changeset: 2:9bc8ce7f9356
| parent: 0:f7b1eb17ad24
~ user: test
date: Thu Jan 01 00:00:00 1970 +0000
--- a/tests/test-logtoprocess.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-logtoprocess.t Sat Sep 30 07:52:48 2017 -0700
@@ -8,8 +8,8 @@
$ hg init
$ cat > $TESTTMP/foocommand.py << EOF
+ > from __future__ import absolute_import
> from mercurial import registrar
- > from time import sleep
> cmdtable = {}
> command = registrar.command(cmdtable)
> @command(b'foo', [])
--- a/tests/test-merge-local.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-merge-local.t Sat Sep 30 07:52:48 2017 -0700
@@ -110,7 +110,7 @@
Are orig files from the last commit where we want them?
$ ls .hg/origbackups
- zzz2_merge_bad.orig
+ zzz2_merge_bad
$ hg diff --nodates | grep "^[+-][^<>]"
--- a/zzz1_merge_ok
--- a/tests/test-merge-symlinks.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-merge-symlinks.t Sat Sep 30 07:52:48 2017 -0700
@@ -1,6 +1,8 @@
$ cat > echo.py <<EOF
> #!$PYTHON
- > import os, sys
+ > from __future__ import absolute_import, print_function
+ > import os
+ > import sys
> try:
> import msvcrt
> msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
@@ -9,7 +11,7 @@
> pass
>
> for k in ('HG_FILE', 'HG_MY_ISLINK', 'HG_OTHER_ISLINK', 'HG_BASE_ISLINK'):
- > print k, os.environ[k]
+ > print(k, os.environ[k])
> EOF
Create 2 heads containing the same file, once as
--- a/tests/test-merge1.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-merge1.t Sat Sep 30 07:52:48 2017 -0700
@@ -1,4 +1,5 @@
$ cat <<EOF > merge
+ > from __future__ import print_function
> import sys, os
>
> try:
@@ -8,7 +9,7 @@
> except ImportError:
> pass
>
- > print "merging for", os.path.basename(sys.argv[1])
+ > print("merging for", os.path.basename(sys.argv[1]))
> EOF
$ HGMERGE="$PYTHON ../merge"; export HGMERGE
@@ -339,9 +340,14 @@
isn't changed on the filesystem (see also issue4583).
$ cat > $TESTTMP/abort.py <<EOF
+ > from __future__ import absolute_import
> # emulate aborting before "recordupdates()". in this case, files
> # are changed without updating dirstate
- > from mercurial import extensions, merge, error
+ > from mercurial import (
+ > error,
+ > extensions,
+ > merge,
+ > )
> def applyupdates(orig, *args, **kwargs):
> orig(*args, **kwargs)
> raise error.Abort('intentional aborting')
--- a/tests/test-merge6.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-merge6.t Sat Sep 30 07:52:48 2017 -0700
@@ -1,6 +1,6 @@
$ cat <<EOF > merge
> import sys, os
- > print "merging for", os.path.basename(sys.argv[1])
+ > print("merging for", os.path.basename(sys.argv[1]))
> EOF
$ HGMERGE="$PYTHON ../merge"; export HGMERGE
--- a/tests/test-mq-eol.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-mq-eol.t Sat Sep 30 07:52:48 2017 -0700
@@ -33,7 +33,7 @@
> for line in file(sys.argv[1], 'rb'):
> line = line.replace('\r', '<CR>')
> line = line.replace('\n', '<LF>')
- > print line
+ > print(line)
> EOF
$ hg init repo
--- a/tests/test-mq-qpush-fail.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-mq-qpush-fail.t Sat Sep 30 07:52:48 2017 -0700
@@ -465,7 +465,7 @@
test previous qpop (with --force and --config) saved .orig files to where user
wants them
$ ls .hg/origbackups
- b.orig
+ b
$ rm -rf .hg/origbackups
$ cd ..
--- a/tests/test-mq.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-mq.t Sat Sep 30 07:52:48 2017 -0700
@@ -1388,7 +1388,7 @@
$ hg qpush -f --verbose --config 'ui.origbackuppath=.hg/origbackups'
applying empty
creating directory: $TESTTMP/forcepush/.hg/origbackups (glob)
- saving current version of hello.txt as $TESTTMP/forcepush/.hg/origbackups/hello.txt.orig (glob)
+ saving current version of hello.txt as $TESTTMP/forcepush/.hg/origbackups/hello.txt (glob)
patching file hello.txt
committing files:
hello.txt
@@ -1422,7 +1422,7 @@
test that the previous call to qpush with -f (--force) and --config actually put
the orig files out of the working copy
$ ls .hg/origbackups
- hello.txt.orig
+ hello.txt
test popping revisions not in working dir ancestry
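
The .orig-less filenames expected above (here and in the largefiles, merge-local and qpush-fail tests earlier) reflect the backup-naming behaviour when ui.origbackuppath points at a directory: the backup keeps the file's repository-relative name under that directory, and the .orig suffix is only appended for in-place backups. A rough model of that rule, not Mercurial's actual scmutil code:

    from __future__ import print_function
    import os

    def backuppath(origbackuppath, repo_relative_path):
        if origbackuppath:
            # e.g. '.hg/origbackups' + 'hello.txt' -> '.hg/origbackups/hello.txt'
            return os.path.join(origbackuppath, repo_relative_path)
        # no configured directory: back up next to the file with a suffix
        return repo_relative_path + '.orig'

    print(backuppath('.hg/origbackups', 'hello.txt'))  # .hg/origbackups/hello.txt
    print(backuppath(None, 'zzz2_merge_bad'))          # zzz2_merge_bad.orig
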
--- a/tests/test-notify-changegroup.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-notify-changegroup.t Sat Sep 30 07:52:48 2017 -0700
@@ -39,15 +39,15 @@
push
$ hg --traceback --cwd b push ../a 2>&1 |
- > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),'
+ > $PYTHON -c 'from __future__ import print_function ; import sys,re; print(re.sub("\n\t", " ", sys.stdin.read()), end="")'
pushing to ../a
searching for changes
adding changesets
adding manifests
adding file changes
added 2 changesets with 2 changes to 1 files
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Date: * (glob)
Subject: * (glob)
@@ -92,13 +92,13 @@
unbundle with correct source
$ hg --config notify.sources=unbundle --cwd a unbundle ../test.hg 2>&1 |
- > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),'
+ > $PYTHON -c 'from __future__ import print_function ; import sys,re; print(re.sub("\n\t", " ", sys.stdin.read()), end="")'
adding changesets
adding manifests
adding file changes
added 2 changesets with 2 changes to 1 files
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Date: * (glob)
Subject: * (glob)
@@ -167,15 +167,15 @@
push
$ hg --traceback --cwd b --config notify.fromauthor=True push ../a 2>&1 |
- > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),'
+ > $PYTHON -c 'from __future__ import print_function ; import sys,re; print(re.sub("\n\t", " ", sys.stdin.read()), end="")'
pushing to ../a
searching for changes
adding changesets
adding manifests
adding file changes
added 4 changesets with 4 changes to 1 files
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Date: * (glob)
Subject: * (glob)
--- a/tests/test-notify.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-notify.t Sat Sep 30 07:52:48 2017 -0700
@@ -1,3 +1,9 @@
+ $ cat > $TESTTMP/filter.py <<EOF
+ > from __future__ import absolute_import, print_function
+ > import re
+ > import sys
+ > print(re.sub("\n[ \t]", " ", sys.stdin.read()), end="")
+ > EOF
$ cat <<EOF >> $HGRCPATH
> [extensions]
@@ -175,16 +181,15 @@
of the very long subject line
pull (minimal config)
- $ hg --traceback --cwd b pull ../a | \
- > $PYTHON -c 'import sys,re; print re.sub("\n[\t ]", " ", sys.stdin.read()),'
+ $ hg --traceback --cwd b pull ../a | $PYTHON $TESTTMP/filter.py
pulling from ../a
searching for changes
adding changesets
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Date: * (glob)
Subject: changeset in $TESTTMP/b: b
@@ -205,6 +210,7 @@
@@ -1,1 +1,2 @@ a
+a
(run 'hg update' to get a working copy)
+
$ cat <<EOF >> $HGRCPATH
> [notify]
> config = `pwd`/.notify.conf
@@ -228,16 +234,15 @@
$ hg --cwd b rollback
repository tip rolled back to revision 0 (undo pull)
- $ hg --traceback --cwd b pull ../a | \
- > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),'
+ $ hg --traceback --cwd b pull ../a | $PYTHON $TESTTMP/filter.py
pulling from ../a
searching for changes
adding changesets
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
X-Test: foo
Date: * (glob)
@@ -254,8 +259,7 @@
diff -r cb9a9f314b8b -r 0647d048b600 a
--- a/a Thu Jan 01 00:00:00 1970 +0000
+++ b/a Thu Jan 01 00:00:01 1970 +0000
- @@ -1,1 +1,2 @@
- a
+ @@ -1,1 +1,2 @@ a
+a
(run 'hg update' to get a working copy)
@@ -272,16 +276,15 @@
$ hg --cwd b rollback
repository tip rolled back to revision 0 (undo pull)
- $ hg --traceback --cwd b pull ../a | \
- > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),'
+ $ hg --traceback --cwd b pull ../a | $PYTHON $TESTTMP/filter.py
pulling from ../a
searching for changes
adding changesets
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
X-Test: foo
Date: * (glob)
@@ -294,17 +297,14 @@
changeset 0647d048b600 in b
description: b
diffstat:
-
- a | 1 +
- 1 files changed, 1 insertions(+), 0 deletions(-)
+ a | 1 + 1 files changed, 1 insertions(+), 0 deletions(-)
diffs (6 lines):
diff -r cb9a9f314b8b -r 0647d048b600 a
--- a/a Thu Jan 01 00:00:00 1970 +0000
+++ b/a Thu Jan 01 00:00:01 1970 +0000
- @@ -1,1 +1,2 @@
- a
+ @@ -1,1 +1,2 @@ a
+a
(run 'hg update' to get a working copy)
@@ -321,16 +321,15 @@
(branch merge, don't forget to commit)
$ hg ci -m merge -d '3 0'
$ cd ..
- $ hg --traceback --cwd b pull ../a | \
- > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),'
+ $ hg --traceback --cwd b pull ../a | $PYTHON $TESTTMP/filter.py
pulling from ../a
searching for changes
adding changesets
adding manifests
adding file changes
added 2 changesets with 0 changes to 0 files
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
X-Test: foo
Date: * (glob)
@@ -343,20 +342,17 @@
changeset 0a184ce6067f in b
description: adda2
diffstat:
-
- a | 1 +
- 1 files changed, 1 insertions(+), 0 deletions(-)
+ a | 1 + 1 files changed, 1 insertions(+), 0 deletions(-)
diffs (6 lines):
diff -r cb9a9f314b8b -r 0a184ce6067f a
--- a/a Thu Jan 01 00:00:00 1970 +0000
+++ b/a Thu Jan 01 00:00:02 1970 +0000
- @@ -1,1 +1,2 @@
- a
+ @@ -1,1 +1,2 @@ a
+a
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
X-Test: foo
Date: * (glob)
@@ -380,15 +376,15 @@
$ hg --cwd a --encoding utf-8 commit -A -d '0 0' \
> -m `$PYTHON -c 'print "\xc3\xa0\xc3\xa1\xc3\xa2\xc3\xa3\xc3\xa4"'`
$ hg --traceback --cwd b --encoding utf-8 pull ../a | \
- > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),'
+ > $PYTHON $TESTTMP/filter.py
pulling from ../a
searching for changes
adding changesets
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 8bit
X-Test: foo
Date: * (glob)
@@ -401,18 +397,14 @@
changeset 7ea05ad269dc in b
description: \xc3\xa0\xc3\xa1\xc3\xa2\xc3\xa3\xc3\xa4 (esc)
diffstat:
-
- a | 1 +
- 1 files changed, 1 insertions(+), 0 deletions(-)
+ a | 1 + 1 files changed, 1 insertions(+), 0 deletions(-)
diffs (7 lines):
diff -r 6a0cf76b2701 -r 7ea05ad269dc a
--- a/a Thu Jan 01 00:00:03 1970 +0000
+++ b/a Thu Jan 01 00:00:00 1970 +0000
- @@ -1,2 +1,3 @@
- a
- a
+ @@ -1,2 +1,3 @@ a a
+a
(run 'hg update' to get a working copy)
@@ -424,7 +416,7 @@
> test = False
> mbox = mbox
> EOF
- $ $PYTHON -c 'file("a/a", "ab").write("no" * 500 + "\n")'
+ $ $PYTHON -c 'file("a/a", "ab").write("no" * 500 + "\xd1\x84" + "\n")'
$ hg --cwd a commit -A -m "long line"
$ hg --traceback --cwd b pull ../a
pulling from ../a
@@ -435,35 +427,30 @@
added 1 changesets with 1 changes to 1 files
notify: sending 2 subscribers 1 changes
(run 'hg update' to get a working copy)
- $ $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", file("b/mbox").read()),'
+ $ $PYTHON $TESTTMP/filter.py < b/mbox
From test@test.com ... ... .. ..:..:.. .... (re)
- Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
+ Content-Type: text/plain; charset="*" (glob)
Content-Transfer-Encoding: quoted-printable
X-Test: foo
Date: * (glob)
Subject: long line
From: test@test.com
- X-Hg-Notification: changeset e0be44cf638b
- Message-Id: <hg.e0be44cf638b.*.*@*> (glob)
+ X-Hg-Notification: changeset a323cae54f6e
+ Message-Id: <hg.a323cae54f6e.*.*@*> (glob)
To: baz@test.com, foo@bar
- changeset e0be44cf638b in b
+ changeset a323cae54f6e in b
description: long line
diffstat:
-
- a | 1 +
- 1 files changed, 1 insertions(+), 0 deletions(-)
+ a | 1 + 1 files changed, 1 insertions(+), 0 deletions(-)
diffs (8 lines):
- diff -r 7ea05ad269dc -r e0be44cf638b a
+ diff -r 7ea05ad269dc -r a323cae54f6e a
--- a/a Thu Jan 01 00:00:00 1970 +0000
+++ b/a Thu Jan 01 00:00:00 1970 +0000
- @@ -1,3 +1,4 @@
- a
- a
- a
+ @@ -1,3 +1,4 @@ a a a
+nonononononononononononononononononononononononononononononononononononono=
nononononononononononononononononononononononononononononononononononononon=
ononononononononononononononononononononononononononononononononononononono=
@@ -477,7 +464,7 @@
ononononononononononononononononononononononononononononononononononononono=
nononononononononononononononononononononononononononononononononononononon=
ononononononononononononononononononononononononononononononononononononono=
- nonononononononononononono
+ nonononononononononononono=D1=84
revset selection: send to address that matches branch and repo
@@ -500,26 +487,25 @@
(branches are permanent and global, did you want a bookmark?)
$ echo a >> a/a
$ hg --cwd a ci -m test -d '1 0'
- $ hg --traceback --cwd b pull ../a | \
- > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),'
+ $ hg --traceback --cwd b pull ../a | $PYTHON $TESTTMP/filter.py
pulling from ../a
searching for changes
adding changesets
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
X-Test: foo
Date: * (glob)
Subject: test
From: test@test.com
- X-Hg-Notification: changeset fbbcbc516f2f
- Message-Id: <hg.fbbcbc516f2f.*.*@*> (glob)
+ X-Hg-Notification: changeset b7cf10b2bdec
+ Message-Id: <hg.b7cf10b2bdec.*.*@*> (glob)
To: baz@test.com, foo@bar, notify@example.com
- changeset fbbcbc516f2f in b
+ changeset b7cf10b2bdec in b
description: test
(run 'hg update' to get a working copy)
@@ -530,26 +516,25 @@
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ echo a >> a/a
$ hg --cwd a ci -m test -d '1 0'
- $ hg --traceback --cwd b pull ../a | \
- > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),'
+ $ hg --traceback --cwd b pull ../a | $PYTHON $TESTTMP/filter.py
pulling from ../a
searching for changes
adding changesets
adding manifests
adding file changes
added 1 changesets with 0 changes to 0 files (+1 heads)
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
X-Test: foo
Date: * (glob)
Subject: test
From: test@test.com
- X-Hg-Notification: changeset 38b42fa092de
- Message-Id: <hg.38b42fa092de.*.*@*> (glob)
+ X-Hg-Notification: changeset 5a07df312a79
+ Message-Id: <hg.5a07df312a79.*.*@*> (glob)
To: baz@test.com, foo@bar
- changeset 38b42fa092de in b
+ changeset 5a07df312a79 in b
description: test
(run 'hg heads' to see heads)
@@ -559,20 +544,19 @@
$ mv "$HGRCPATH.new" $HGRCPATH
$ echo a >> a/a
$ hg --cwd a commit -m 'default template'
- $ hg --cwd b pull ../a -q | \
- > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),'
+ $ hg --cwd b pull ../a -q | $PYTHON $TESTTMP/filter.py
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Date: * (glob)
Subject: changeset in b: default template
From: test@test.com
- X-Hg-Notification: changeset 3548c9e294b6
- Message-Id: <hg.3548c9e294b6.*.*@*> (glob)
+ X-Hg-Notification: changeset f5e8ec95bf59
+ Message-Id: <hg.f5e8ec95bf59.*.*@*> (glob)
To: baz@test.com, foo@bar
- changeset 3548c9e294b6 in $TESTTMP/b (glob)
- details: http://test/b?cmd=changeset;node=3548c9e294b6
+ changeset f5e8ec95bf59 in $TESTTMP/b (glob)
+ details: http://test/b?cmd=changeset;node=f5e8ec95bf59
description: default template
with style:
@@ -589,19 +573,18 @@
> EOF
$ echo a >> a/a
$ hg --cwd a commit -m 'with style'
- $ hg --cwd b pull ../a -q | \
- > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),'
+ $ hg --cwd b pull ../a -q | $PYTHON $TESTTMP/filter.py
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Date: * (glob)
Subject: with style
From: test@test.com
- X-Hg-Notification: changeset e917dbd961d3
- Message-Id: <hg.e917dbd961d3.*.*@*> (glob)
+ X-Hg-Notification: changeset 9e2c3a8e9c43
+ Message-Id: <hg.9e2c3a8e9c43.*.*@*> (glob)
To: baz@test.com, foo@bar
- changeset e917dbd961d3
+ changeset 9e2c3a8e9c43
with template (overrides style):
@@ -613,16 +596,15 @@
> EOF
$ echo a >> a/a
$ hg --cwd a commit -m 'with template'
- $ hg --cwd b pull ../a -q | \
- > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),'
+ $ hg --cwd b pull ../a -q | $PYTHON $TESTTMP/filter.py
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Date: * (glob)
- Subject: a09743fd3edd: with template
+ Subject: e2cbf5bf18a7: with template
From: test@test.com
- X-Hg-Notification: changeset a09743fd3edd
- Message-Id: <hg.a09743fd3edd.*.*@*> (glob)
+ X-Hg-Notification: changeset e2cbf5bf18a7
+ Message-Id: <hg.e2cbf5bf18a7.*.*@*> (glob)
To: baz@test.com, foo@bar
with template
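
The rewritten expectations above follow from two details of the new shared filter: $TESTTMP/filter.py folds any line that starts with a space or a tab onto the previous line (the old inline one-liner only folded tab-indented continuations), and it prints with end="" so no extra newline is appended. That is why the multi-line diffstat block now renders as a single line. A tiny demonstration of the same re.sub call:

    from __future__ import absolute_import, print_function
    import re

    message = ("diffstat:\n"
               "\n"
               " a | 1 +\n"
               " 1 files changed, 1 insertions(+), 0 deletions(-)\n")
    # Folding "\n[ \t]" joins the space-indented continuation lines:
    print(re.sub("\n[ \t]", " ", message), end="")
    # diffstat:
    #  a | 1 + 1 files changed, 1 insertions(+), 0 deletions(-)
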
--- a/tests/test-obsmarker-template.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-obsmarker-template.t Sat Sep 30 07:52:48 2017 -0700
@@ -10,7 +10,14 @@
> [phases]
> publish=False
> [experimental]
- > evolution=all
+ > stabilization=all
+ > [templates]
+ > obsfatesuccessors = "{if(successors, " as ")}{join(successors, ", ")}"
+ > obsfateverb = "{obsfateverb(successors)}"
+ > obsfateoperations = "{if(obsfateoperations(markers), " using {join(obsfateoperations(markers), ", ")}")}"
+ > obsfateusers = "{if(obsfateusers(markers), " by {join(obsfateusers(markers), ", ")}")}"
+ > obsfatedate = "{if(obsfatedate(markers), "{ifeq(min(obsfatedate(markers)), max(obsfatedate(markers)), " (at {min(obsfatedate(markers))|isodate})", " (between {min(obsfatedate(markers))|isodate} and {max(obsfatedate(markers))|isodate})")}")}"
+ > obsfate = "{obsfateverb}{obsfateoperations}{obsfatesuccessors}{obsfateusers}{obsfatedate}; "
> [alias]
> tlog = log -G -T '{node|short}\
> {if(predecessors, "\n Predecessors: {predecessors}")}\
@@ -20,6 +27,8 @@
> {if(successorssets, "\n Successors: {successorssets}")}\
> {if(successorssets, "\n multi-line: {join(successorssets, "\n multi-line: ")}")}\
> {if(successorssets, "\n json: {successorssets|json}")}\n'
+ > fatelog = log -G -T '{node|short}\n{if(succsandmarkers, " Obsfate: {succsandmarkers % "{obsfate}"} \n" )}'
+ > fatelogjson = log -G -T '{node|short}\n{if(succsandmarkers, " Obsfate: {succsandmarkers|json}\n")}'
> EOF
Test templates on amended commit
@@ -33,28 +42,23 @@
$ mkcommit ROOT
$ mkcommit A0
$ echo 42 >> A0
- $ hg commit --amend -m "A1"
- $ hg commit --amend -m "A2"
+ $ HGUSER=test1 hg commit --amend -m "A1" --config devel.default-date="1234567890 0"
+ $ HGUSER=test2 hg commit --amend -m "A2" --config devel.default-date="987654321 0"
$ hg log --hidden -G
- @ changeset: 4:d004c8f274b9
+ @ changeset: 3:d004c8f274b9
| tag: tip
| parent: 0:ea207398892e
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: A2
|
- | x changeset: 3:a468dc9b3633
+ | x changeset: 2:a468dc9b3633
|/ parent: 0:ea207398892e
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: A1
|
- | x changeset: 2:f137d23bb3e1
- | | user: test
- | | date: Thu Jan 01 00:00:00 1970 +0000
- | | summary: temporary amend commit for 471f378eab4c
- | |
| x changeset: 1:471f378eab4c
|/ user: test
| date: Thu Jan 01 00:00:00 1970 +0000
@@ -78,51 +82,86 @@
| json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
| map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
| @ 471f378eab4c
- |/ Successors: 4:d004c8f274b9
- | multi-line: 4:d004c8f274b9
+ |/ Successors: 3:d004c8f274b9
+ | multi-line: 3:d004c8f274b9
| json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
o ea207398892e
+ $ hg fatelog -q --traceback
+ o d004c8f274b9
+ |
+ | @ 471f378eab4c
+ |/ Obsfate: rewritten using amend as 3:d004c8f274b9 by test1, test2 (between 2001-04-19 04:25 +0000 and 2009-02-13 23:31 +0000);
+ o ea207398892e
+
+ $ hg fatelog
+ o d004c8f274b9
+ |
+ | @ 471f378eab4c
+ |/ Obsfate: rewritten using amend as 3:d004c8f274b9 by test1, test2 (between 2001-04-19 04:25 +0000 and 2009-02-13 23:31 +0000);
+ o ea207398892e
+
+ $ hg fatelog -v
+ o d004c8f274b9
+ |
+ | @ 471f378eab4c
+ |/ Obsfate: rewritten using amend as 3:d004c8f274b9 by test1, test2 (between 2001-04-19 04:25 +0000 and 2009-02-13 23:31 +0000);
+ o ea207398892e
+
$ hg up 'desc(A1)' --hidden
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
Predecessors template should show current revision as it is the working copy
$ hg tlog
o d004c8f274b9
- | Predecessors: 3:a468dc9b3633
- | semi-colon: 3:a468dc9b3633
+ | Predecessors: 2:a468dc9b3633
+ | semi-colon: 2:a468dc9b3633
| json: ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]
- | map: 3:a468dc9b36338b14fdb7825f55ce3df4e71517ad
+ | map: 2:a468dc9b36338b14fdb7825f55ce3df4e71517ad
| @ a468dc9b3633
- |/ Successors: 4:d004c8f274b9
- | multi-line: 4:d004c8f274b9
+ |/ Successors: 3:d004c8f274b9
+ | multi-line: 3:d004c8f274b9
| json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
o ea207398892e
+ $ hg fatelog -v
+ o d004c8f274b9
+ |
+ | @ a468dc9b3633
+ |/ Obsfate: rewritten using amend as 3:d004c8f274b9 by test2 (at 2001-04-19 04:25 +0000);
+ o ea207398892e
+
Predecessors template should show all the predecessors as we force their display
with --hidden
$ hg tlog --hidden
o d004c8f274b9
- | Predecessors: 3:a468dc9b3633
- | semi-colon: 3:a468dc9b3633
+ | Predecessors: 2:a468dc9b3633
+ | semi-colon: 2:a468dc9b3633
| json: ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]
- | map: 3:a468dc9b36338b14fdb7825f55ce3df4e71517ad
+ | map: 2:a468dc9b36338b14fdb7825f55ce3df4e71517ad
| @ a468dc9b3633
|/ Predecessors: 1:471f378eab4c
| semi-colon: 1:471f378eab4c
| json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
| map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
- | Successors: 4:d004c8f274b9
- | multi-line: 4:d004c8f274b9
+ | Successors: 3:d004c8f274b9
+ | multi-line: 3:d004c8f274b9
| json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
- | x f137d23bb3e1
- | |
| x 471f378eab4c
- |/ Successors: 3:a468dc9b3633
- | multi-line: 3:a468dc9b3633
+ |/ Successors: 2:a468dc9b3633
+ | multi-line: 2:a468dc9b3633
| json: [["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]]
o ea207398892e
+ $ hg fatelog --hidden -q
+ o d004c8f274b9
+ |
+ | @ a468dc9b3633
+ |/ Obsfate: rewritten using amend as 3:d004c8f274b9 by test2 (at 2001-04-19 04:25 +0000);
+ | x 471f378eab4c
+ |/ Obsfate: rewritten using amend as 2:a468dc9b3633 by test1 (at 2009-02-13 23:31 +0000);
+ o ea207398892e
+
Predecessors template shouldn't show anything as all obsolete commit are not
visible.
@@ -135,27 +174,48 @@
$ hg tlog --hidden
@ d004c8f274b9
- | Predecessors: 3:a468dc9b3633
- | semi-colon: 3:a468dc9b3633
+ | Predecessors: 2:a468dc9b3633
+ | semi-colon: 2:a468dc9b3633
| json: ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]
- | map: 3:a468dc9b36338b14fdb7825f55ce3df4e71517ad
+ | map: 2:a468dc9b36338b14fdb7825f55ce3df4e71517ad
| x a468dc9b3633
|/ Predecessors: 1:471f378eab4c
| semi-colon: 1:471f378eab4c
| json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
| map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
- | Successors: 4:d004c8f274b9
- | multi-line: 4:d004c8f274b9
+ | Successors: 3:d004c8f274b9
+ | multi-line: 3:d004c8f274b9
| json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
- | x f137d23bb3e1
- | |
| x 471f378eab4c
- |/ Successors: 3:a468dc9b3633
- | multi-line: 3:a468dc9b3633
+ |/ Successors: 2:a468dc9b3633
+ | multi-line: 2:a468dc9b3633
| json: [["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]]
o ea207398892e
+ $ hg fatelog -v
+ @ d004c8f274b9
+ |
+ o ea207398892e
+
+ $ hg fatelog -v --hidden
+ @ d004c8f274b9
+ |
+ | x a468dc9b3633
+ |/ Obsfate: rewritten using amend as 3:d004c8f274b9 by test2 (at 2001-04-19 04:25 +0000);
+ | x 471f378eab4c
+ |/ Obsfate: rewritten using amend as 2:a468dc9b3633 by test1 (at 2009-02-13 23:31 +0000);
+ o ea207398892e
+
+ $ hg fatelogjson --hidden
+ @ d004c8f274b9
+ |
+ | x a468dc9b3633
+ |/ Obsfate: [{"markers": [["a468dc9b36338b14fdb7825f55ce3df4e71517ad", ["d004c8f274b9ec480a47a93c10dac5eee63adb78"], 0, [["operation", "amend"], ["user", "test2"]], [987654321.0, 0], null]], "successors": ["d004c8f274b9ec480a47a93c10dac5eee63adb78"]}]
+ | x 471f378eab4c
+ |/ Obsfate: [{"markers": [["471f378eab4c5e25f6c77f785b27c936efb22874", ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"], 0, [["operation", "amend"], ["user", "test1"]], [1234567890.0, 0], null]], "successors": ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]}]
+ o ea207398892e
+
Test templates with splitted commit
===================================
@@ -239,6 +299,16 @@
| json: [["337fec4d2edcf0e7a467e35f818234bc620068b5", "f257fde29c7a847c9b607f6e958656d0df0fb15c"]]
o ea207398892e
+
+ $ hg fatelog
+ o f257fde29c7a
+ |
+ o 337fec4d2edc
+ |
+ | @ 471597cad322
+ |/ Obsfate: split as 2:337fec4d2edc, 3:f257fde29c7a by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
$ hg up f257fde29c7a
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -270,6 +340,25 @@
| json: [["337fec4d2edcf0e7a467e35f818234bc620068b5", "f257fde29c7a847c9b607f6e958656d0df0fb15c"]]
o ea207398892e
+
+ $ hg fatelog --hidden
+ @ f257fde29c7a
+ |
+ o 337fec4d2edc
+ |
+ | x 471597cad322
+ |/ Obsfate: split as 2:337fec4d2edc, 3:f257fde29c7a by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
+ $ hg fatelogjson --hidden
+ @ f257fde29c7a
+ |
+ o 337fec4d2edc
+ |
+ | x 471597cad322
+ |/ Obsfate: [{"markers": [["471597cad322d1f659bb169751be9133dad92ef3", ["337fec4d2edcf0e7a467e35f818234bc620068b5", "f257fde29c7a847c9b607f6e958656d0df0fb15c"], 0, [["user", "test"]], [0.0, 0], null]], "successors": ["337fec4d2edcf0e7a467e35f818234bc620068b5", "f257fde29c7a847c9b607f6e958656d0df0fb15c"]}]
+ o ea207398892e
+
Test templates with folded commit
=================================
@@ -354,6 +443,14 @@
| json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
o ea207398892e
+
+ $ hg fatelog
+ o eb5a0daa2192
+ |
+ | @ 471f378eab4c
+ |/ Obsfate: rewritten as 3:eb5a0daa2192 by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
$ hg up 'desc(B0)' --hidden
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -375,6 +472,16 @@
| json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
o ea207398892e
+
+ $ hg fatelog
+ o eb5a0daa2192
+ |
+ | @ 0dec01379d3b
+ | | Obsfate: rewritten as 3:eb5a0daa2192 by test (at 1970-01-01 00:00 +0000);
+ | x 471f378eab4c
+ |/ Obsfate: rewritten as 3:eb5a0daa2192 by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
$ hg up 'desc(C0)'
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -404,6 +511,26 @@
o ea207398892e
+ $ hg fatelog --hidden
+ @ eb5a0daa2192
+ |
+ | x 0dec01379d3b
+ | | Obsfate: rewritten as 3:eb5a0daa2192 by test (at 1970-01-01 00:00 +0000);
+ | x 471f378eab4c
+ |/ Obsfate: rewritten as 3:eb5a0daa2192 by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
+
+ $ hg fatelogjson --hidden
+ @ eb5a0daa2192
+ |
+ | x 0dec01379d3b
+ | | Obsfate: [{"markers": [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", ["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"], 0, [["user", "test"]], [0.0, 0], null]], "successors": ["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]}]
+ | x 471f378eab4c
+ |/ Obsfate: [{"markers": [["471f378eab4c5e25f6c77f785b27c936efb22874", ["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"], 0, [["user", "test"]], [0.0, 0], null]], "successors": ["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]}]
+ o ea207398892e
+
+
Test templates with divergence
==============================
@@ -442,14 +569,14 @@
| parent: 0:ea207398892e
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
- | trouble: divergent
+ | instability: content-divergent
| summary: A2
|
| o changeset: 2:fdf9bde5129a
|/ parent: 0:ea207398892e
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
- | trouble: divergent
+ | instability: content-divergent
| summary: A1
|
| x changeset: 1:471f378eab4c
@@ -469,7 +596,7 @@
| parent: 0:ea207398892e
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
- | trouble: divergent
+ | instability: content-divergent
| summary: A3
|
| x changeset: 3:65b757b745b9
@@ -482,7 +609,7 @@
|/ parent: 0:ea207398892e
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
- | trouble: divergent
+ | instability: content-divergent
| summary: A1
|
| x changeset: 1:471f378eab4c
@@ -521,6 +648,15 @@
| json: [["fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e"], ["019fadeab383f6699fa83ad7bdb4d82ed2c0e5ab"]]
o ea207398892e
+ $ hg fatelog
+ o 019fadeab383
+ |
+ | o fdf9bde5129a
+ |/
+ | @ 471f378eab4c
+ |/ Obsfate: rewritten using amend as 2:fdf9bde5129a by test (at 1970-01-01 00:00 +0000); rewritten using amend as 4:019fadeab383 by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
$ hg up 'desc(A1)'
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -533,6 +669,14 @@
|/
o ea207398892e
+
+ $ hg fatelog
+ o 019fadeab383
+ |
+ | @ fdf9bde5129a
+ |/
+ o ea207398892e
+
Predecessors template should the predecessors as we force their display with
--hidden
$ hg tlog --hidden
@@ -562,6 +706,30 @@
o ea207398892e
+ $ hg fatelog --hidden
+ o 019fadeab383
+ |
+ | x 65b757b745b9
+ |/ Obsfate: rewritten using amend as 4:019fadeab383 by test (at 1970-01-01 00:00 +0000);
+ | @ fdf9bde5129a
+ |/
+ | x 471f378eab4c
+ |/ Obsfate: rewritten using amend as 2:fdf9bde5129a by test (at 1970-01-01 00:00 +0000); rewritten using amend as 3:65b757b745b9 by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
+
+ $ hg fatelogjson --hidden
+ o 019fadeab383
+ |
+ | x 65b757b745b9
+ |/ Obsfate: [{"markers": [["65b757b745b935093c87a2bccd877521cccffcbd", ["019fadeab383f6699fa83ad7bdb4d82ed2c0e5ab"], 0, [["operation", "amend"], ["user", "test"]], [0.0, 0], null]], "successors": ["019fadeab383f6699fa83ad7bdb4d82ed2c0e5ab"]}]
+ | @ fdf9bde5129a
+ |/
+ | x 471f378eab4c
+ |/ Obsfate: [{"markers": [["471f378eab4c5e25f6c77f785b27c936efb22874", ["fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e"], 0, [["operation", "amend"], ["user", "test"]], [0.0, 0], null]], "successors": ["fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e"]}, {"markers": [["471f378eab4c5e25f6c77f785b27c936efb22874", ["65b757b745b935093c87a2bccd877521cccffcbd"], 0, [["operation", "amend"], ["user", "test"]], [0.0, 0], null]], "successors": ["65b757b745b935093c87a2bccd877521cccffcbd"]}]
+ o ea207398892e
+
+
Test templates with amended + folded commit
===========================================
@@ -659,6 +827,14 @@
| json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
o ea207398892e
+
+ $ hg fatelog
+ o eb5a0daa2192
+ |
+ | @ 471f378eab4c
+ |/ Obsfate: rewritten as 4:eb5a0daa2192 by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
$ hg up 'desc(B0)' --hidden
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -679,6 +855,16 @@
| json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
o ea207398892e
+
+ $ hg fatelog
+ o eb5a0daa2192
+ |
+ | @ 0dec01379d3b
+ | | Obsfate: rewritten using amend as 4:eb5a0daa2192 by test (at 1970-01-01 00:00 +0000);
+ | x 471f378eab4c
+ |/ Obsfate: rewritten as 4:eb5a0daa2192 by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
$ hg up 'desc(B1)' --hidden
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -699,6 +885,16 @@
| json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
o ea207398892e
+
+ $ hg fatelog
+ o eb5a0daa2192
+ |
+ | @ b7ea6d14e664
+ | | Obsfate: rewritten as 4:eb5a0daa2192 by test (at 1970-01-01 00:00 +0000);
+ | x 471f378eab4c
+ |/ Obsfate: rewritten as 4:eb5a0daa2192 by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
$ hg up 'desc(C0)'
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -708,6 +904,12 @@
|
o ea207398892e
+
+ $ hg fatelog
+ @ eb5a0daa2192
+ |
+ o ea207398892e
+
Predecessors template should show all predecessors as we force their display
with --hidden
$ hg tlog --hidden
@@ -735,6 +937,30 @@
o ea207398892e
+ $ hg fatelog --hidden
+ @ eb5a0daa2192
+ |
+ | x b7ea6d14e664
+ | | Obsfate: rewritten as 4:eb5a0daa2192 by test (at 1970-01-01 00:00 +0000);
+ | | x 0dec01379d3b
+ | |/ Obsfate: rewritten using amend as 3:b7ea6d14e664 by test (at 1970-01-01 00:00 +0000);
+ | x 471f378eab4c
+ |/ Obsfate: rewritten as 4:eb5a0daa2192 by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
+
+ $ hg fatelogjson --hidden
+ @ eb5a0daa2192
+ |
+ | x b7ea6d14e664
+ | | Obsfate: [{"markers": [["b7ea6d14e664bdc8922221f7992631b50da3fb07", ["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"], 0, [["user", "test"]], [0.0, 0], null]], "successors": ["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]}]
+ | | x 0dec01379d3b
+ | |/ Obsfate: [{"markers": [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", ["b7ea6d14e664bdc8922221f7992631b50da3fb07"], 0, [["operation", "amend"], ["user", "test"]], [0.0, 0], null]], "successors": ["b7ea6d14e664bdc8922221f7992631b50da3fb07"]}]
+ | x 471f378eab4c
+ |/ Obsfate: [{"markers": [["471f378eab4c5e25f6c77f785b27c936efb22874", ["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"], 0, [["user", "test"]], [0.0, 0], null]], "successors": ["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]}]
+ o ea207398892e
+
+
Test template with pushed and pulled obs markers
================================================
@@ -819,8 +1045,8 @@
$ hg debugobsolete
- 471f378eab4c5e25f6c77f785b27c936efb22874 fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 7a230b46bf61e50b30308c6cfd7bd1269ef54702 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 471f378eab4c5e25f6c77f785b27c936efb22874 fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 7a230b46bf61e50b30308c6cfd7bd1269ef54702 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
Check templates
---------------
@@ -838,6 +1064,14 @@
| json: [["7a230b46bf61e50b30308c6cfd7bd1269ef54702"]]
o ea207398892e
+
+ $ hg fatelog
+ o 7a230b46bf61
+ |
+ | @ 471f378eab4c
+ |/ Obsfate: rewritten using amend as 2:7a230b46bf61 by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
$ hg up 'desc(A2)'
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -847,6 +1081,12 @@
|
o ea207398892e
+
+ $ hg fatelog
+ @ 7a230b46bf61
+ |
+ o ea207398892e
+
Predecessors template should show all predecessors as we force their display
with --hidden
$ hg tlog --hidden
@@ -862,6 +1102,14 @@
o ea207398892e
+ $ hg fatelog --hidden
+ @ 7a230b46bf61
+ |
+ | x 471f378eab4c
+ |/ Obsfate: rewritten using amend as 2:7a230b46bf61 by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
+
Test template with obsmarkers cycle
===================================
@@ -895,6 +1143,12 @@
o ea207398892e
+ $ hg fatelog
+ @ f897c6137566
+ |
+ o ea207398892e
+
+
$ hg up -r "desc(B0)" --hidden
2 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg tlog
@@ -923,6 +1177,16 @@
o ea207398892e
+ $ hg fatelog
+ o f897c6137566
+ |
+ | @ 0dec01379d3b
+ | | Obsfate: rewritten as 3:f897c6137566 by test (at 1970-01-01 00:00 +0000); rewritten as 1:471f378eab4c by test (at 1970-01-01 00:00 +0000);
+ | x 471f378eab4c
+ |/ Obsfate: rewritten as 2:0dec01379d3b by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
+
$ hg up -r "desc(A0)" --hidden
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg tlog
@@ -936,6 +1200,14 @@
o ea207398892e
+ $ hg fatelog
+ o f897c6137566
+ |
+ | @ 471f378eab4c
+ |/ Obsfate: pruned;
+ o ea207398892e
+
+
$ hg up -r "desc(ROOT)" --hidden
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg tlog
@@ -944,6 +1216,12 @@
@ ea207398892e
+ $ hg fatelog
+ o f897c6137566
+ |
+ @ ea207398892e
+
+
$ hg tlog --hidden
o f897c6137566
| Predecessors: 2:0dec01379d3b
@@ -1086,20 +1364,20 @@
| parent: 5:dd800401bd8c
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
- | trouble: divergent
+ | instability: content-divergent
| summary: Add B only
|
| o changeset: 8:b18bc8331526
|/ parent: 5:dd800401bd8c
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
- | trouble: divergent
+ | instability: content-divergent
| summary: Add only B
|
| o changeset: 7:ba2ed02b0c9a
| | user: test
| | date: Thu Jan 01 00:00:00 1970 +0000
- | | trouble: unstable, divergent
+ | | instability: orphan, content-divergent
| | summary: Add A,B,C
| |
| x changeset: 6:4a004186e638
@@ -1111,7 +1389,7 @@
| parent: 3:f897c6137566
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
- | trouble: divergent
+ | instability: content-divergent
| summary: Add A,B,C
|
o changeset: 3:f897c6137566
@@ -1153,6 +1431,21 @@
|
o ea207398892e
+ $ hg fatelog
+ @ 0b997eb7ceee
+ |
+ | o b18bc8331526
+ |/
+ | o ba2ed02b0c9a
+ | |
+ | x 4a004186e638
+ |/ Obsfate: rewritten using amend as 8:b18bc8331526 by test (at 1970-01-01 00:00 +0000); rewritten using amend as 9:0b997eb7ceee by test (at 1970-01-01 00:00 +0000);
+ o dd800401bd8c
+ |
+ o f897c6137566
+ |
+ o ea207398892e
+
$ hg tlog --hidden
@ 0b997eb7ceee
| Predecessors: 6:4a004186e638
@@ -1211,6 +1504,48 @@
| json: [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]]
o ea207398892e
+ $ hg fatelog --hidden
+ @ 0b997eb7ceee
+ |
+ | o b18bc8331526
+ |/
+ | o ba2ed02b0c9a
+ | |
+ | x 4a004186e638
+ |/ Obsfate: rewritten using amend as 8:b18bc8331526 by test (at 1970-01-01 00:00 +0000); rewritten using amend as 9:0b997eb7ceee by test (at 1970-01-01 00:00 +0000);
+ o dd800401bd8c
+ |
+ | x 9bd10a0775e4
+ |/ Obsfate: split as 5:dd800401bd8c, 6:4a004186e638, 7:ba2ed02b0c9a by test (at 1970-01-01 00:00 +0000);
+ o f897c6137566
+ |
+ | x 0dec01379d3b
+ | | Obsfate: rewritten as 3:f897c6137566 by test (at 1970-01-01 00:00 +0000); rewritten as 1:471f378eab4c by test (at 1970-01-01 00:00 +0000);
+ | x 471f378eab4c
+ |/ Obsfate: rewritten as 2:0dec01379d3b by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
+ $ hg fatelogjson --hidden
+ @ 0b997eb7ceee
+ |
+ | o b18bc8331526
+ |/
+ | o ba2ed02b0c9a
+ | |
+ | x 4a004186e638
+ |/ Obsfate: [{"markers": [["4a004186e63889f20cb16434fcbd72220bd1eace", ["b18bc8331526a22cbb1801022bd1555bf291c48b"], 0, [["operation", "amend"], ["user", "test"]], [0.0, 0], null]], "successors": ["b18bc8331526a22cbb1801022bd1555bf291c48b"]}, {"markers": [["4a004186e63889f20cb16434fcbd72220bd1eace", ["0b997eb7ceeee06200a02f8aab185979092d514e"], 0, [["operation", "amend"], ["user", "test"]], [0.0, 0], null]], "successors": ["0b997eb7ceeee06200a02f8aab185979092d514e"]}]
+ o dd800401bd8c
+ |
+ | x 9bd10a0775e4
+ |/ Obsfate: [{"markers": [["9bd10a0775e478708cada5f176ec6de654359ce7", ["dd800401bd8c79d815329277739e433e883f784e", "4a004186e63889f20cb16434fcbd72220bd1eace", "ba2ed02b0c9a56b9fdbc4e79c7e57866984d8a1f"], 0, [["user", "test"]], [0.0, 0], null]], "successors": ["dd800401bd8c79d815329277739e433e883f784e", "4a004186e63889f20cb16434fcbd72220bd1eace", "ba2ed02b0c9a56b9fdbc4e79c7e57866984d8a1f"]}]
+ o f897c6137566
+ |
+ | x 0dec01379d3b
+ | | Obsfate: [{"markers": [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", ["f897c6137566320b081514b4c7227ecc3d384b39"], 0, [["user", "test"]], [0.0, 0], null]], "successors": ["f897c6137566320b081514b4c7227ecc3d384b39"]}, {"markers": [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", ["471f378eab4c5e25f6c77f785b27c936efb22874"], 0, [["user", "test"]], [0.0, 0], null]], "successors": ["471f378eab4c5e25f6c77f785b27c936efb22874"]}]
+ | x 471f378eab4c
+ |/ Obsfate: [{"markers": [["471f378eab4c5e25f6c77f785b27c936efb22874", ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"], 0, [["user", "test"]], [0.0, 0], null]], "successors": ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]}]
+ o ea207398892e
+
$ hg up --hidden 4
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg rebase -r 7 -d 8 --config extensions.rebase=
@@ -1245,6 +1580,22 @@
|
o ea207398892e
+
+ $ hg fatelog
+ o eceed8f98ffc
+ |
+ | o 0b997eb7ceee
+ | |
+ o | b18bc8331526
+ |/
+ o dd800401bd8c
+ |
+ | @ 9bd10a0775e4
+ |/ Obsfate: split using amend, rebase as 5:dd800401bd8c, 9:0b997eb7ceee, 10:eceed8f98ffc by test (at 1970-01-01 00:00 +0000); split using amend, rebase as 5:dd800401bd8c, 8:b18bc8331526, 10:eceed8f98ffc by test (at 1970-01-01 00:00 +0000);
+ o f897c6137566
+ |
+ o ea207398892e
+
Test templates with pruned commits
==================================
@@ -1268,3 +1619,148 @@
|
o ea207398892e
+ $ hg fatelog
+ @ 471f378eab4c
+ | Obsfate: pruned by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
+ $ hg fatelog -v
+ @ 471f378eab4c
+ | Obsfate: pruned by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
+Test templates with multiple pruned commits
+===========================================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/multiple-local-prune
+ $ cd $TESTTMP/multiple-local-prune
+ $ mkcommit ROOT
+ $ mkcommit A0
+ $ hg commit --amend -m "A1"
+ $ hg debugobsolete --record-parent `getid "."`
+ obsoleted 1 changesets
+
+ $ hg up -r "desc(A0)" --hidden
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg commit --amend -m "A2"
+ $ hg debugobsolete --record-parent `getid "."`
+ obsoleted 1 changesets
+
+Check output
+------------
+
+ $ hg up "desc(A0)" --hidden
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg tlog
+ @ 471f378eab4c
+ |
+ o ea207398892e
+
+# todo: the obsfate output is not ideal
+ $ hg fatelog
+ @ 471f378eab4c
+ | Obsfate: pruned;
+ o ea207398892e
+
+ $ hg fatelog -v --hidden
+ x 65b757b745b9
+ | Obsfate: pruned by test (at 1970-01-01 00:00 +0000);
+ | x fdf9bde5129a
+ |/ Obsfate: pruned by test (at 1970-01-01 00:00 +0000);
+ | @ 471f378eab4c
+ |/ Obsfate: rewritten using amend as 2:fdf9bde5129a by test (at 1970-01-01 00:00 +0000); rewritten using amend as 3:65b757b745b9 by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
+
+Test templates with split and pruned commit
+===========================================
+
+ $ hg init $TESTTMP/templates-local-split-prune
+ $ cd $TESTTMP/templates-local-split-prune
+ $ mkcommit ROOT
+ $ echo 42 >> a
+ $ echo 43 >> b
+ $ hg commit -A -m "A0"
+ adding a
+ adding b
+ $ hg log --hidden -G
+ @ changeset: 1:471597cad322
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+# Simulate split
+ $ hg up -r "desc(ROOT)"
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ echo 42 >> a
+ $ hg commit -A -m "A1"
+ adding a
+ created new head
+ $ echo 43 >> b
+ $ hg commit -A -m "A2"
+ adding b
+ $ hg debugobsolete `getid "1"` `getid "2"` `getid "3"`
+ obsoleted 1 changesets
+
+# Simulate prune
+ $ hg debugobsolete --record-parent `getid "."`
+ obsoleted 1 changesets
+
+ $ hg log --hidden -G
+ @ changeset: 3:0d0ef4bdf70e
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A2
+ |
+ o changeset: 2:617adc3a144c
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A1
+ |
+ | x changeset: 1:471597cad322
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+Check templates
+---------------
+
+ $ hg up 'desc("A0")' --hidden
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+# todo: the obsfate output is not ideal
+ $ hg fatelog
+ o 617adc3a144c
+ |
+ | @ 471597cad322
+ |/ Obsfate: pruned;
+ o ea207398892e
+
+ $ hg up -r 'desc("A2")' --hidden
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+ $ hg fatelog --hidden
+ @ 0d0ef4bdf70e
+ | Obsfate: pruned by test (at 1970-01-01 00:00 +0000);
+ o 617adc3a144c
+ |
+ | x 471597cad322
+ |/ Obsfate: split as 2:617adc3a144c, 3:0d0ef4bdf70e by test (at 1970-01-01 00:00 +0000);
+ o ea207398892e
+
--- a/tests/test-obsolete-bundle-strip.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-obsolete-bundle-strip.t Sat Sep 30 07:52:48 2017 -0700
@@ -15,10 +15,10 @@
>
> [experimental]
> # enable evolution
- > evolution = all
+ > stabilization = all
>
> # include obsmarkers in bundle
- > evolution.bundle-obsmarker = yes
+ > stabilization.bundle-obsmarker = yes
>
> [extensions]
> # needed for some tests
--- a/tests/test-obsolete-changeset-exchange.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-obsolete-changeset-exchange.t Sat Sep 30 07:52:48 2017 -0700
@@ -3,7 +3,7 @@
$ cat >> $HGRCPATH << EOF
> [experimental]
- > evolution=createmarkers
+ > stabilization=createmarkers
> EOF
Push does not corrupt remote
@@ -87,17 +87,17 @@
check that bundle can contain markers:
- $ hg bundle --hidden --rev f89bcc95eba5 --base "f89bcc95eba5^" ../f89bcc95eba5-obs.hg --config experimental.evolution.bundle-obsmarker=1
+ $ hg bundle --hidden --rev f89bcc95eba5 --base "f89bcc95eba5^" ../f89bcc95eba5-obs.hg --config experimental.stabilization.bundle-obsmarker=1
1 changesets found
$ hg debugbundle ../f89bcc95eba5.hg
- Stream params: sortdict([('Compression', 'BZ')])
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 1, version: 02}
f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
$ hg debugbundle ../f89bcc95eba5-obs.hg
- Stream params: sortdict([('Compression', 'BZ')])
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 1, version: 02}
f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
- obsmarkers -- 'sortdict()'
+ obsmarkers -- {}
version: 1 (70 bytes)
9d73aac1b2ed7d53835eaeec212ed41ea47da53a f89bcc95eba5174b1ccc3e33a82e84c96e8338ee 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -149,12 +149,11 @@
1 changesets found
list of changesets:
bec0734cd68e84477ba7fc1d13e6cff53ab70129
- listing keys for "phases"
listing keys for "bookmarks"
bundle2-output-bundle: "HG20", 3 parts total
bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
- bundle2-output-part: "listkeys" (params: 1 mandatory) 58 bytes payload
bundle2-output-part: "listkeys" (params: 1 mandatory) empty payload
+ bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
adding changesets
@@ -165,8 +164,8 @@
added 1 changesets with 1 changes to 1 files (+1 heads)
bundle2-input-part: total payload size 476
bundle2-input-part: "listkeys" (params: 1 mandatory) supported
- bundle2-input-part: total payload size 58
- bundle2-input-part: "listkeys" (params: 1 mandatory) supported
+ bundle2-input-part: "phase-heads" supported
+ bundle2-input-part: total payload size 24
bundle2-input-bundle: 2 parts total
checking for updated bookmarks
updating the branch cache
--- a/tests/test-obsolete-checkheads.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-obsolete-checkheads.t Sat Sep 30 07:52:48 2017 -0700
@@ -6,7 +6,7 @@
> [ui]
> logtemplate='{node|short} ({phase}) {desc|firstline}\n'
> [experimental]
- > evolution=createmarkers
+ > stabilization=createmarkers
> EOF
$ mkcommit() {
> echo "$1" > "$1"
--- a/tests/test-obsolete-divergent.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-obsolete-divergent.t Sat Sep 30 07:52:48 2017 -0700
@@ -9,7 +9,7 @@
> [ui]
> logtemplate = {rev}:{node|short} {desc}\n
> [experimental]
- > evolution=createmarkers
+ > stabilization=createmarkers
> [extensions]
> drawdag=$TESTDIR/drawdag.py
> [alias]
@@ -80,7 +80,7 @@
82623d38b9ba
392fd25390da
392fd25390da
- $ hg log -r 'divergent()'
+ $ hg log -r 'contentdivergent()'
2:82623d38b9ba A_1
3:392fd25390da A_2
$ hg debugsuccessorssets 'all()' --closest
@@ -107,7 +107,7 @@
$ hg push ../other
pushing to ../other
searching for changes
- abort: push includes divergent changeset: 392fd25390da!
+ abort: push includes content-divergent changeset: 392fd25390da!
[255]
$ cd ..
@@ -147,7 +147,7 @@
01f36c5a8fda
01f36c5a8fda
01f36c5a8fda
- $ hg log -r 'divergent()'
+ $ hg log -r 'contentdivergent()'
2:82623d38b9ba A_1
4:01f36c5a8fda A_3
$ hg debugsuccessorssets 'all()' --closest
@@ -199,7 +199,7 @@
82623d38b9ba
392fd25390da
392fd25390da
- $ hg log -r 'divergent()'
+ $ hg log -r 'contentdivergent()'
2:82623d38b9ba A_1
3:392fd25390da A_2
$ hg debugsuccessorssets 'all()' --closest
@@ -278,7 +278,7 @@
01f36c5a8fda
01f36c5a8fda
01f36c5a8fda
- $ hg log -r 'divergent()'
+ $ hg log -r 'contentdivergent()'
$ hg debugsuccessorssets 'all()' --closest
d20a80d4def3
d20a80d4def3
@@ -322,7 +322,7 @@
82623d38b9ba
392fd25390da
392fd25390da
- $ hg log -r 'divergent()'
+ $ hg log -r 'contentdivergent()'
$ hg debugsuccessorssets 'all()' --closest
d20a80d4def3
d20a80d4def3
@@ -410,7 +410,7 @@
e442cfc57690
e442cfc57690
e442cfc57690
- $ hg log -r 'divergent()'
+ $ hg log -r 'contentdivergent()'
Check more complex obsolescence graft (with divergence)
@@ -515,7 +515,7 @@
14608b260df8
bed64f5d2f5a
bed64f5d2f5a
- $ hg log -r 'divergent()'
+ $ hg log -r 'contentdivergent()'
4:01f36c5a8fda A_3
8:7ae126973a96 A_7
9:14608b260df8 A_8
@@ -614,7 +614,7 @@
a139f71be9da
a139f71be9da
a139f71be9da
- $ hg log -r 'divergent()'
+ $ hg log -r 'contentdivergent()'
$ cd ..
@@ -670,16 +670,16 @@
$ rm .hg/localtags
$ hg cleanup --config extensions.t=$TESTTMP/scmutilcleanup.py
- $ hg log -G -T '{rev}:{node|short} {desc} {troubles}' -r 'sort(all(), topo)'
- @ 5:1a2a9b5b0030 B2 divergent
+ $ hg log -G -T '{rev}:{node|short} {desc} {instabilities}' -r 'sort(all(), topo)'
+ @ 5:1a2a9b5b0030 B2 content-divergent
|
- | o 4:70d5a63ca112 B4 divergent
+ | o 4:70d5a63ca112 B4 content-divergent
| |
| o 1:48b9aae0607f Z
|
o 0:426bada5c675 A
$ hg debugobsolete
- a178212c3433c4e77b573f6011e29affb8aefa33 1a2a9b5b0030632400aa78e00388c20f99d3ec44 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- a178212c3433c4e77b573f6011e29affb8aefa33 ad6478fb94ecec98b86daae98722865d494ac561 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- ad6478fb94ecec98b86daae98722865d494ac561 70d5a63ca112acb3764bc1d7320ca90ea688d671 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a178212c3433c4e77b573f6011e29affb8aefa33 1a2a9b5b0030632400aa78e00388c20f99d3ec44 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ a178212c3433c4e77b573f6011e29affb8aefa33 ad6478fb94ecec98b86daae98722865d494ac561 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'test', 'user': 'test'}
+ ad6478fb94ecec98b86daae98722865d494ac561 70d5a63ca112acb3764bc1d7320ca90ea688d671 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'test', 'user': 'test'}
--- a/tests/test-obsolete-tag-cache.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-obsolete-tag-cache.t Sat Sep 30 07:52:48 2017 -0700
@@ -5,7 +5,7 @@
> mock=$TESTDIR/mockblackbox.py
>
> [experimental]
- > evolution = createmarkers
+ > stabilization = createmarkers
> EOF
Create a repo with some tags
--- a/tests/test-obsolete.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-obsolete.t Sat Sep 30 07:52:48 2017 -0700
@@ -3,7 +3,7 @@
> # public changeset are not obsolete
> publish=false
> [ui]
- > logtemplate="{rev}:{node|short} ({phase}{if(obsolete, ' *{obsolete}*')}{if(troubles, ' {troubles}')}) [{tags} {bookmarks}] {desc|firstline}\n"
+ > logtemplate="{rev}:{node|short} ({phase}{if(obsolete, ' *{obsolete}*')}{if(instabilities, ' {instabilities}')}) [{tags} {bookmarks}] {desc|firstline}\n"
> EOF
$ mkcommit() {
> echo "$1" > "$1"
@@ -39,7 +39,7 @@
$ cat >> $HGRCPATH << EOF
> [experimental]
- > evolution=createmarkers,exchange
+ > stabilization=createmarkers,exchange
> EOF
Killing a single changeset without replacement
@@ -207,7 +207,7 @@
$ hg --hidden phase --public 2
$ hg log -G
- @ 5:5601fb93a350 (draft bumped) [tip ] add new_3_c
+ @ 5:5601fb93a350 (draft phase-divergent) [tip ] add new_3_c
|
| o 2:245bde4270cd (public) [ ] add original_c
|/
@@ -223,8 +223,8 @@
note that the bumped changeset (5:5601fb93a350) is not a direct successor of
the public changeset
- $ hg log --hidden -r 'bumped()'
- 5:5601fb93a350 (draft bumped) [tip ] add new_3_c
+ $ hg log --hidden -r 'phasedivergent()'
+ 5:5601fb93a350 (draft phase-divergent) [tip ] add new_3_c
And that we can't push bumped changeset
@@ -239,20 +239,20 @@
$ hg push ../tmpa
pushing to ../tmpa
searching for changes
- abort: push includes bumped changeset: 5601fb93a350!
+ abort: push includes phase-divergent changeset: 5601fb93a350!
[255]
Fixing "bumped" situation
We need to create a clone of 5 and add a special marker with a flag
$ hg summary
- parent: 5:5601fb93a350 tip (bumped)
+ parent: 5:5601fb93a350 tip (phase-divergent)
add new_3_c
branch: default
commit: (clean)
update: 1 new changesets, 2 branch heads (merge)
phases: 1 draft
- bumped: 1 changesets
+ phase-divergent: 1 changesets
$ hg up '5^'
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg revert -ar 5
@@ -261,7 +261,7 @@
created new head
$ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c`
obsoleted 1 changesets
- $ hg log -r 'bumped()'
+ $ hg log -r 'phasedivergent()'
$ hg log -G
@ 6:6f9641995072 (draft) [tip ] add n3w_3_c
|
@@ -520,15 +520,15 @@
$ hg log -r 'obsolete()'
4:94b33453f93b (draft *obsolete*) [ ] add original_d
$ hg summary
- parent: 5:cda648ca50f5 tip (unstable)
+ parent: 5:cda648ca50f5 tip (orphan)
add original_e
branch: default
commit: (clean)
update: 1 new changesets, 2 branch heads (merge)
phases: 3 draft
- unstable: 1 changesets
- $ hg log -G -r '::unstable()'
- @ 5:cda648ca50f5 (draft unstable) [tip ] add original_e
+ orphan: 1 changesets
+ $ hg log -G -r '::orphan()'
+ @ 5:cda648ca50f5 (draft orphan) [tip ] add original_e
|
x 4:94b33453f93b (draft *obsolete*) [ ] add original_d
|
@@ -552,7 +552,7 @@
$ hg push ../tmpc/
pushing to ../tmpc/
searching for changes
- abort: push includes unstable changeset: cda648ca50f5!
+ abort: push includes orphan changeset: cda648ca50f5!
[255]
Test that extinct changesets are properly detected
@@ -570,7 +570,7 @@
2:245bde4270cd (public) [ ] add original_c
3:6f9641995072 (draft) [ ] add n3w_3_c
4:94b33453f93b (draft *obsolete*) [ ] add original_d
- 5:cda648ca50f5 (draft unstable) [tip ] add original_e
+ 5:cda648ca50f5 (draft orphan) [tip ] add original_e
$ hg push ../tmpf -f # -f because we push unstable too
pushing to ../tmpf
searching for changes
@@ -591,7 +591,7 @@
Do not warn about new head when the new head is a successor of a remote one
$ hg log -G
- @ 5:cda648ca50f5 (draft unstable) [tip ] add original_e
+ @ 5:cda648ca50f5 (draft orphan) [tip ] add original_e
|
x 4:94b33453f93b (draft *obsolete*) [ ] add original_d
|
@@ -699,42 +699,42 @@
"date": [1339.0, 0],
"flag": 0,
"metadata": {"user": "test"},
- "precnode": "1339133913391339133913391339133913391339",
+ "prednode": "1339133913391339133913391339133913391339",
"succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
},
{
"date": [1339.0, 0],
"flag": 0,
"metadata": {"user": "test"},
- "precnode": "1337133713371337133713371337133713371337",
+ "prednode": "1337133713371337133713371337133713371337",
"succnodes": ["5601fb93a350734d935195fee37f4054c529ff39"]
},
{
"date": [121.0, 120],
"flag": 12,
"metadata": {"user": "test"},
- "precnode": "245bde4270cd1072a27757984f9cda8ba26f08ca",
+ "prednode": "245bde4270cd1072a27757984f9cda8ba26f08ca",
"succnodes": ["cdbce2fbb16313928851e97e0d85413f3f7eb77f"]
},
{
"date": [1338.0, 0],
"flag": 1,
"metadata": {"user": "test"},
- "precnode": "5601fb93a350734d935195fee37f4054c529ff39",
+ "prednode": "5601fb93a350734d935195fee37f4054c529ff39",
"succnodes": ["6f96419950729f3671185b847352890f074f7557"]
},
{
"date": [1338.0, 0],
"flag": 0,
"metadata": {"user": "test"},
- "precnode": "ca819180edb99ed25ceafb3e9584ac287e240b00",
+ "prednode": "ca819180edb99ed25ceafb3e9584ac287e240b00",
"succnodes": ["1337133713371337133713371337133713371337"]
},
{
"date": [1337.0, 0],
"flag": 0,
"metadata": {"user": "test"},
- "precnode": "cdbce2fbb16313928851e97e0d85413f3f7eb77f",
+ "prednode": "cdbce2fbb16313928851e97e0d85413f3f7eb77f",
"succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
},
{
@@ -742,14 +742,14 @@
"flag": 0,
"metadata": {"user": "test"},
"parentnodes": ["6f96419950729f3671185b847352890f074f7557"],
- "precnode": "94b33453f93bdb8d457ef9b770851a618bf413e1",
+ "prednode": "94b33453f93bdb8d457ef9b770851a618bf413e1",
"succnodes": []
},
{
"date": *, (glob)
"flag": 0,
"metadata": {"user": "test <test@example.net>"},
- "precnode": "cda648ca50f50482b7055c0b0c4c117bba6733d9",
+ "prednode": "cda648ca50f50482b7055c0b0c4c117bba6733d9",
"succnodes": ["3de5eca88c00aa039da7399a220f4a5221faa585"]
}
]
@@ -892,7 +892,7 @@
Checking _enable=False warning if obsolete marker exists
$ echo '[experimental]' >> $HGRCPATH
- $ echo "evolution=" >> $HGRCPATH
+ $ echo "stabilization=" >> $HGRCPATH
$ hg log -r tip
obsolete feature not enabled but 68 markers found!
68:c15e9edfca13 (draft) [tip ] add celestine
@@ -900,7 +900,7 @@
reenable for later test
$ echo '[experimental]' >> $HGRCPATH
- $ echo "evolution=createmarkers,exchange" >> $HGRCPATH
+ $ echo "stabilization=createmarkers,exchange" >> $HGRCPATH
$ rm hg.pid access.log errors.log
#endif
@@ -910,11 +910,11 @@
$ hg debugobsolete `getid obsolete_e`
obsoleted 1 changesets
$ hg debugobsolete `getid original_c` `getid babar`
- $ hg log --config ui.logtemplate= -r 'bumped() and unstable()'
+ $ hg log --config ui.logtemplate= -r 'phasedivergent() and orphan()'
changeset: 7:50c51b361e60
user: test
date: Thu Jan 01 00:00:00 1970 +0000
- trouble: unstable, bumped
+ instability: orphan, phase-divergent
summary: add babar
@@ -925,16 +925,16 @@
test the "troubles" templatekw
- $ hg log -r 'bumped() and unstable()'
- 7:50c51b361e60 (draft unstable bumped) [ ] add babar
+ $ hg log -r 'phasedivergent() and orphan()'
+ 7:50c51b361e60 (draft orphan phase-divergent) [ ] add babar
test the default cmdline template
- $ hg log -T default -r 'bumped()'
+ $ hg log -T default -r 'phasedivergent()'
changeset: 7:50c51b361e60
user: test
date: Thu Jan 01 00:00:00 1970 +0000
- trouble: unstable, bumped
+ instability: orphan, phase-divergent
summary: add babar
$ hg log -T default -r 'obsolete()'
@@ -945,19 +945,53 @@
summary: add obsolete_e
+test the obsolete labels
+
+ $ hg log --config ui.logtemplate= --color=debug -r 'phasedivergent()'
+ [log.changeset changeset.draft changeset.unstable instability.orphan instability.phase-divergent|changeset: 7:50c51b361e60]
+ [log.user|user: test]
+ [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
+ [log.instability|instability: orphan, phase-divergent]
+ [log.summary|summary: add babar]
+
+
+ $ hg log -T default -r 'phasedivergent()' --color=debug
+ [log.changeset changeset.draft changeset.unstable instability.orphaninstability.phase-divergent|changeset: 7:50c51b361e60]
+ [log.user|user: test]
+ [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
+ [log.instability|instability: orphan, phase-divergent]
+ [log.summary|summary: add babar]
+
+
+ $ hg log --config ui.logtemplate= --color=debug -r "obsolete()"
+ [log.changeset changeset.draft changeset.obsolete|changeset: 6:3de5eca88c00]
+ [log.parent changeset.draft|parent: 3:6f9641995072]
+ [log.user|user: test]
+ [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
+ [log.summary|summary: add obsolete_e]
+
+
+ $ hg log -T default -r 'obsolete()' --color=debug
+ [log.changeset changeset.draft changeset.obsolete|changeset: 6:3de5eca88c00]
+ [log.parent changeset.draft|parent: 3:6f9641995072]
+ [log.user|user: test]
+ [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
+ [log.summary|summary: add obsolete_e]
+
+
test summary output
- $ hg up -r 'bumped() and unstable()'
+ $ hg up -r 'phasedivergent() and orphan()'
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg summary
- parent: 7:50c51b361e60 (unstable, bumped)
+ parent: 7:50c51b361e60 (orphan, phase-divergent)
add babar
branch: default
commit: (clean)
update: 2 new changesets (update)
phases: 4 draft
- unstable: 2 changesets
- bumped: 1 changesets
+ orphan: 2 changesets
+ phase-divergent: 1 changesets
$ hg up -r 'obsolete()'
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg summary
@@ -967,8 +1001,8 @@
commit: (clean)
update: 3 new changesets (update)
phases: 4 draft
- unstable: 2 changesets
- bumped: 1 changesets
+ orphan: 2 changesets
+ phase-divergent: 1 changesets
Test incoming/outgoing with changesets obsoleted remotely, known locally
===============================================================================
@@ -995,18 +1029,18 @@
o 0:d20a80d4def3 (draft) [ ] base
$ hg log -G -R ../repo-issue3805
- @ 3:323a9c3ddd91 (draft) [tip ] A
+ @ 2:323a9c3ddd91 (draft) [tip ] A
|
o 0:d20a80d4def3 (draft) [ ] base
$ hg incoming
comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
searching for changes
- 3:323a9c3ddd91 (draft) [tip ] A
+ 2:323a9c3ddd91 (draft) [tip ] A
$ hg incoming --bundle ../issue3805.hg
comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
searching for changes
- 3:323a9c3ddd91 (draft) [tip ] A
+ 2:323a9c3ddd91 (draft) [tip ] A
$ hg outgoing
comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
searching for changes
@@ -1044,7 +1078,7 @@
adding manifests
adding file changes
added 2 changesets with 2 changes to 2 files
- 2 new obsolescence markers
+ 1 new obsolescence markers
$ hg out ../repo-issue3814
comparing with ../repo-issue3814
searching for changes
@@ -1055,7 +1089,7 @@
$ hg tag -l visible -r 1 --hidden
$ hg log -G
- @ 3:323a9c3ddd91 (draft) [tip ] A
+ @ 2:323a9c3ddd91 (draft) [tip ] A
|
| x 1:29f0c6921ddd (draft *obsolete*) [visible ] A
|/
@@ -1065,8 +1099,8 @@
$ hg tag -l -r tip tiptag
$ hg tags
- tiptag 3:323a9c3ddd91
- tip 3:323a9c3ddd91
+ tiptag 2:323a9c3ddd91
+ tip 2:323a9c3ddd91
visible 1:29f0c6921ddd
$ hg --config extensions.strip= strip -r tip --no-backup
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
@@ -1108,10 +1142,8 @@
$ echo "B+" >> foo
$ hg ci --amend -m "B+"
$ hg log -G --hidden
- @ 3:b7d587542d40 (draft) [tip ] B+
+ @ 2:b7d587542d40 (draft) [tip ] B+
|
- | x 2:eb95e9297e18 (draft *obsolete*) [ ] temporary amend commit for 44526ebb0f98
- | |
| x 1:44526ebb0f98 (draft *obsolete*) [ ] B
|/
o 0:4b34ecfb0d56 (draft) [ ] A
@@ -1123,9 +1155,9 @@
1:44526ebb0f98 (draft) [ ] B
2:c186d7714947 (draft) [tip ] C
$ hg log -G -R ../bundleoverlay.hg
- o 4:c186d7714947 (draft) [tip ] C
+ o 3:c186d7714947 (draft) [tip ] C
|
- | @ 3:b7d587542d40 (draft) [ ] B+
+ | @ 2:b7d587542d40 (draft) [ ] B+
|/
o 0:4b34ecfb0d56 (draft) [ ] A
@@ -1173,18 +1205,17 @@
Test heads computation on pending index changes with obsolescence markers
$ cd ..
$ cat >$TESTTMP/test_extension.py << EOF
+ > from __future__ import absolute_import
+ > from mercurial.i18n import _
> from mercurial import cmdutil, registrar
- > from mercurial.i18n import _
>
> cmdtable = {}
> command = registrar.command(cmdtable)
> @command(b"amendtransient",[], _('hg amendtransient [rev]'))
> def amend(ui, repo, *pats, **opts):
- > def commitfunc(ui, repo, message, match, opts):
- > return repo.commit(message, repo['.'].user(), repo['.'].date(), match)
> opts['message'] = 'Test'
> opts['logfile'] = None
- > cmdutil.amend(ui, repo, commitfunc, repo['.'], {}, pats, opts)
+ > cmdutil.amend(ui, repo, repo['.'], {}, pats, opts)
> ui.write('%s\n' % repo.changelog.headrevs())
> EOF
$ cat >> $HGRCPATH << EOF
@@ -1199,15 +1230,21 @@
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ echo aa > a
$ hg amendtransient
- [1, 3]
+ [1, 2]
Test cache consistency for the visible filter
1) We want to make sure that the cached filtered revs are invalidated when
bookmarks change
$ cd ..
$ cat >$TESTTMP/test_extension.py << EOF
+ > from __future__ import absolute_import, print_function
> import weakref
- > from mercurial import cmdutil, extensions, bookmarks, repoview
+ > from mercurial import (
+ > bookmarks,
+ > cmdutil,
+ > extensions,
+ > repoview,
+ > )
> def _bookmarkchanged(orig, bkmstoreinst, *args, **kwargs):
> reporef = weakref.ref(bkmstoreinst._repo)
> def trhook(tr):
@@ -1215,7 +1252,7 @@
> hidden1 = repoview.computehidden(repo)
> hidden = repoview.filterrevs(repo, 'visible')
> if sorted(hidden1) != sorted(hidden):
- > print "cache inconsistency"
+ > print("cache inconsistency")
> bkmstoreinst._repo.currenttransaction().addpostclose('test_extension', trhook)
> orig(bkmstoreinst, *args, **kwargs)
> def extsetup(ui):
@@ -1234,7 +1271,7 @@
$ hg commit --amend -m "message"
$ hg book bookb -r 13bedc178fce --hidden
$ hg log -r 13bedc178fce
- 5:13bedc178fce (draft *obsolete*) [ bookb] add b
+ 4:13bedc178fce (draft *obsolete*) [ bookb] add b
$ hg book -d bookb
$ hg log -r 13bedc178fce
abort: hidden revision '13bedc178fce'!
@@ -1263,19 +1300,17 @@
$ hg ci -m '2'
$ echo bar > f2
- $ hg commit --amend --config experimetnal.evolution=createmarkers
+ $ hg commit --amend --config experimetnal.stabilization=createmarkers
$ hg log -G
- @ 4:b0551702f918 (draft) [tip ] 2
+ @ 3:b0551702f918 (draft) [tip ] 2
|
o 1:e016b03fd86f (draft) [ ] 1
|
o 0:a78f55e5508c (draft) [ ] 0
$ hg log -G --hidden
- @ 4:b0551702f918 (draft) [tip ] 2
+ @ 3:b0551702f918 (draft) [tip ] 2
|
- | x 3:f27abbcc1f77 (draft *obsolete*) [ ] temporary amend commit for e008cf283490
- | |
| x 2:e008cf283490 (draft *obsolete*) [ ] 2
|/
o 1:e016b03fd86f (draft) [ ] 1
@@ -1284,10 +1319,9 @@
$ hg strip --hidden -r 2 --config extensions.strip= --config devel.strip-obsmarkers=no
- saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e008cf283490-39c978dc-backup.hg (glob)
+ saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e008cf283490-ede36964-backup.hg (glob)
$ hg debugobsolete
- e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (*) {'user': 'test'} (glob)
- f27abbcc1f77fb409cf9160482fe619541e2d605 0 {e008cf2834908e5d6b0f792a9d4b0e2272260fb8} (*) {'user': 'test'} (glob)
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
$ hg log -G
@ 2:b0551702f918 (draft) [tip ] 2
|
@@ -1303,23 +1337,18 @@
o 0:a78f55e5508c (draft) [ ] 0
$ hg debugbundle .hg/strip-backup/e008cf283490-*-backup.hg
- Stream params: sortdict([('Compression', 'BZ')])
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '2')])"
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 1, version: 02}
e008cf2834908e5d6b0f792a9d4b0e2272260fb8
- f27abbcc1f77fb409cf9160482fe619541e2d605
- obsmarkers -- 'sortdict()'
- version: 1 (70 bytes)
- f27abbcc1f77fb409cf9160482fe619541e2d605 0 {e008cf2834908e5d6b0f792a9d4b0e2272260fb8} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- phase-heads -- 'sortdict()'
- f27abbcc1f77fb409cf9160482fe619541e2d605 draft
+ phase-heads -- {}
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8 draft
$ hg pull .hg/strip-backup/e008cf283490-*-backup.hg
- pulling from .hg/strip-backup/e008cf283490-39c978dc-backup.hg
+ pulling from .hg/strip-backup/e008cf283490-ede36964-backup.hg
searching for changes
no changes found
$ hg debugobsolete
- e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (*) {'user': 'test'} (glob)
- f27abbcc1f77fb409cf9160482fe619541e2d605 0 {e008cf2834908e5d6b0f792a9d4b0e2272260fb8} (*) {'user': 'test'} (glob)
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
$ hg log -G
@ 2:b0551702f918 (draft) [tip ] 2
|
@@ -1348,15 +1377,14 @@
@ 0:a78f55e5508c (draft) [tip ] 0
$ hg debugbundle .hg/strip-backup/e016b03fd86f-*-backup.hg
- Stream params: sortdict([('Compression', 'BZ')])
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '2')])"
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 2, version: 02}
e016b03fd86fcccc54817d120b90b751aaf367d6
b0551702f918510f01ae838ab03a463054c67b46
- obsmarkers -- 'sortdict()'
- version: 1 (139 bytes)
- e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- f27abbcc1f77fb409cf9160482fe619541e2d605 0 {e008cf2834908e5d6b0f792a9d4b0e2272260fb8} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- phase-heads -- 'sortdict()'
+ obsmarkers -- {}
+ version: 1 (86 bytes)
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ phase-heads -- {}
b0551702f918510f01ae838ab03a463054c67b46 draft
$ hg unbundle .hg/strip-backup/e016b03fd86f-*-backup.hg
@@ -1364,11 +1392,10 @@
adding manifests
adding file changes
added 2 changesets with 2 changes to 2 files
- 2 new obsolescence markers
+ 1 new obsolescence markers
(run 'hg update' to get a working copy)
$ hg debugobsolete | sort
- e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (*) {'user': 'test'} (glob)
- f27abbcc1f77fb409cf9160482fe619541e2d605 0 {e008cf2834908e5d6b0f792a9d4b0e2272260fb8} (*) {'user': 'test'} (glob)
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
$ hg log -G
o 2:b0551702f918 (draft) [tip ] 2
|
@@ -1402,9 +1429,9 @@
$ echo d > d
$ hg ci -Am d
adding d
- $ hg ci --amend -m dd --config experimental.evolution.track-operation=1
+ $ hg ci --amend -m dd --config experimental.stabilization.track-operation=1
$ hg debugobsolete --index --rev "3+7"
- 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 \(.*\) {'user': 'test'} (re)
+ 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 \(.*\) {'operation': 'amend', 'user': 'test'} (re)
$ hg debugobsolete --index --rev "3+7" -Tjson
[
@@ -1412,8 +1439,8 @@
"date": [0.0, 0],
"flag": 0,
"index": 1,
- "metadata": {"user": "test"},
- "precnode": "6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1",
+ "metadata": {"operation": "amend", "user": "test"},
+ "prednode": "6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1",
"succnodes": ["d27fb9b066076fd921277a4b9e8b9cb48c95bc6a"]
},
{
@@ -1421,22 +1448,22 @@
"flag": 0,
"index": 3,
"metadata": {"operation": "amend", "user": "test"},
- "precnode": "4715cf767440ed891755448016c2b8cf70760c30",
+ "prednode": "4715cf767440ed891755448016c2b8cf70760c30",
"succnodes": ["7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d"]
}
]
Test the --delete option of debugobsolete command
$ hg debugobsolete --index
- 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- 2 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ 2 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
$ hg debugobsolete --delete 1 --delete 3
deleted 2 obsolescence markers
$ hg debugobsolete
- cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
+ 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
Test adding changeset after obsmarkers affecting it
(eg: during pull, or unbundle)
--- a/tests/test-pager.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-pager.t Sat Sep 30 07:52:48 2017 -0700
@@ -340,9 +340,25 @@
9: a 9
10: a 10
+During pushbuffer, pager should not start:
+ $ cat > $TESTTMP/pushbufferpager.py <<EOF
+ > def uisetup(ui):
+ > ui.pushbuffer()
+ > ui.pager('mycmd')
+ > ui.write('content\n')
+ > ui.write(ui.popbuffer())
+ > EOF
+
+ $ echo append >> a
+ $ hg --config extensions.pushbuffer=$TESTTMP/pushbufferpager.py status --color=off
+ content
+ paged! 'M a\n'
+
Environment variables like LESS and LV are set automatically:
$ cat > $TESTTMP/printlesslv.py <<EOF
- > import os, sys
+ > from __future__ import absolute_import
+ > import os
+ > import sys
> sys.stdin.read()
> for name in ['LESS', 'LV']:
> sys.stdout.write(('%s=%s\n') % (name, os.environ.get(name, '-')))
--- a/tests/test-patch.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-patch.t Sat Sep 30 07:52:48 2017 -0700
@@ -1,8 +1,9 @@
$ cat > patchtool.py <<EOF
+ > from __future__ import absolute_import, print_function
> import sys
- > print 'Using custom patch'
+ > print('Using custom patch')
> if '--binary' in sys.argv:
- > print '--binary found !'
+ > print('--binary found !')
> EOF
$ echo "[ui]" >> $HGRCPATH
--- a/tests/test-patchbomb-bookmark.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-patchbomb-bookmark.t Sat Sep 30 07:52:48 2017 -0700
@@ -31,8 +31,8 @@
Cc:
displaying [PATCH 0 of 2] bookmark ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 0 of 2] bookmark
Message-Id: <patchbomb.347155260@*> (glob)
@@ -43,8 +43,8 @@
displaying [PATCH 1 of 2] first ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 1 of 2] first
X-Mercurial-Node: accde9b8b6dce861c185d0825c1affc09a79cb26
@@ -74,8 +74,8 @@
+first
displaying [PATCH 2 of 2] second ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 2 of 2] second
X-Mercurial-Node: 417defd1559c396ba06a44dce8dc1c2d2d653f3f
@@ -138,8 +138,8 @@
Cc:
displaying [PATCH] bookmark ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH] bookmark
X-Mercurial-Node: 8dab2639fd35f1e337ad866c372a5c44f1064e3c
--- a/tests/test-patchbomb.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-patchbomb.t Sat Sep 30 07:52:48 2017 -0700
@@ -9,18 +9,19 @@
--===+[0-9]+=+$ -> --===*= (glob)
$ cat > prune-blank-after-boundary.py <<EOF
+ > from __future__ import absolute_import, print_function
> import sys
> skipblank = False
> trim = lambda x: x.strip(' \r\n')
> for l in sys.stdin:
> if trim(l).endswith('=--') or trim(l).endswith('=='):
> skipblank = True
- > print l,
+ > print(l, end='')
> continue
> if not trim(l) and skipblank:
> continue
> skipblank = False
- > print l,
+ > print(l, end='')
> EOF
$ FILTERBOUNDARY="$PYTHON `pwd`/prune-blank-after-boundary.py"
$ echo "[format]" >> $HGRCPATH
@@ -39,8 +40,8 @@
displaying [PATCH] a ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH] a
X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab
@@ -69,6 +70,45 @@
+a
+If --to is specified on the command line, it should override any
+email.to config setting. Same for --cc:
+
+ $ hg email --date '1970-1-1 0:1' -n -f quux --to foo --cc bar -r tip \
+ > --config email.to=bob@example.com --config email.cc=alice@example.com
+ this patch series consists of 1 patches.
+
+
+ displaying [PATCH] a ...
+ MIME-Version: 1.0
+ Content-Type: text/plain; charset="us-ascii"
+ Content-Transfer-Encoding: 7bit
+ Subject: [PATCH] a
+ X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab
+ X-Mercurial-Series-Index: 1
+ X-Mercurial-Series-Total: 1
+ Message-Id: <*@*> (glob)
+ X-Mercurial-Series-Id: <*@*> (glob)
+ User-Agent: Mercurial-patchbomb/* (glob)
+ Date: Thu, 01 Jan 1970 00:01:00 +0000
+ From: quux
+ To: foo
+ Cc: bar
+
+ # HG changeset patch
+ # User test
+ # Date 1 0
+ # Thu Jan 01 00:00:01 1970 +0000
+ # Node ID 8580ff50825a50c8f716709acdf8de0deddcd6ab
+ # Parent 0000000000000000000000000000000000000000
+ a
+
+ diff -r 000000000000 -r 8580ff50825a a
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/a Thu Jan 01 00:00:01 1970 +0000
+ @@ -0,0 +1,1 @@
+ +a
+
+
$ hg --config ui.interactive=1 email --confirm -n -f quux -t foo -c bar -r tip<<EOF
> n
> EOF
@@ -114,8 +154,8 @@
displaying [PATCH] a ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH] a
X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab
@@ -152,8 +192,8 @@
displaying [PATCH] a ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH] a
X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab
@@ -194,8 +234,8 @@
displaying [PATCH 0 of 2] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 0 of 2] test
Message-Id: <patchbomb.120@*> (glob)
@@ -207,8 +247,8 @@
displaying [PATCH 1 of 2] a ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 1 of 2] a
X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab
@@ -239,8 +279,8 @@
+a
displaying [PATCH 2 of 2] b ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 2 of 2] b
X-Mercurial-Node: 97d72e5f12c7e84f85064aa72e5a297142c36ed9
@@ -283,7 +323,8 @@
$ hg email -m test.mbox -f quux -t foo -c bar -s test 0:tip \
> --config extensions.progress= --config progress.assume-tty=1 \
> --config progress.delay=0 --config progress.refresh=0 \
- > --config progress.width=60
+ > --config progress.width=60 \
+ > --config extensions.mocktime=$TESTDIR/mocktime.py
this patch series consists of 2 patches.
@@ -293,10 +334,10 @@
sending [ ] 0/3\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
- sending [==============> ] 1/3\r (no-eol) (esc)
+ sending [============> ] 1/3 01s\r (no-eol) (esc)
\r (no-eol) (esc)
\r (no-eol) (esc)
- sending [=============================> ] 2/3\r (no-eol) (esc)
+ sending [==========================> ] 2/3 01s\r (no-eol) (esc)
\r (esc)
sending [PATCH 0 of 2] test ...
sending [PATCH 1 of 2] a ...
@@ -335,8 +376,8 @@
Cc: bar
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
a multiline
@@ -380,8 +421,8 @@
Cc: bar
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
a multiline
@@ -413,8 +454,8 @@
displaying [PATCH] utf-8 content ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 8bit
Subject: [PATCH] utf-8 content
X-Mercurial-Node: 909a00e13e9d78b575aeee23dddbada46d5a143f
@@ -459,8 +500,8 @@
$ cat mbox
From quux ... ... .. ..:..:.. .... (re)
+ MIME-Version: 1.0
Content-Type: text/plain; charset="utf-8"
- MIME-Version: 1.0
Content-Transfer-Encoding: base64
Subject: [PATCH] utf-8 content
X-Mercurial-Node: 909a00e13e9d78b575aeee23dddbada46d5a143f
@@ -521,8 +562,8 @@
displaying [PATCH] long line ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: quoted-printable
Subject: [PATCH] long line
X-Mercurial-Node: a2ea8fc83dd8b93cfd86ac97b28287204ab806e1
@@ -575,8 +616,8 @@
sending [PATCH] long line ...
$ cat mbox
From quux ... ... .. ..:..:.. .... (re)
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: quoted-printable
Subject: [PATCH] long line
X-Mercurial-Node: a2ea8fc83dd8b93cfd86ac97b28287204ab806e1
@@ -637,8 +678,8 @@
sending [PATCH] isolatin 8-bit encoding ...
$ cat mbox
From quux ... ... .. ..:..:.. .... (re)
+ MIME-Version: 1.0
Content-Type: text/plain; charset="iso-8859-1"
- MIME-Version: 1.0
Content-Transfer-Encoding: quoted-printable
Subject: [PATCH] isolatin 8-bit encoding
X-Mercurial-Node: 240fb913fc1b7ff15ddb9f33e73d82bf5277c720
@@ -685,8 +726,8 @@
are you sure you want to send (yn)? y
displaying [PATCH] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH] test
X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f
@@ -747,8 +788,8 @@
are you sure you want to send (yn)? y
displaying [PATCH 0 of 2] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 0 of 2] test
Message-Id: <patchbomb.60@*> (glob)
@@ -764,8 +805,8 @@
2 files changed, 2 insertions(+), 0 deletions(-)
displaying [PATCH 1 of 2] a ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 1 of 2] a
X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab
@@ -800,8 +841,8 @@
+a
displaying [PATCH 2 of 2] b ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 2 of 2] b
X-Mercurial-Node: 97d72e5f12c7e84f85064aa72e5a297142c36ed9
@@ -857,8 +898,8 @@
Cc: bar
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/x-patch; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Content-Disposition: inline; filename=t2.patch
@@ -900,8 +941,8 @@
Cc: bar
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/x-patch; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: quoted-printable
Content-Disposition: inline; filename=t2.patch
@@ -947,8 +988,8 @@
displaying [PATCH 0 of 3] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 0 of 3] test
Message-Id: <patchbomb.60@*> (glob)
@@ -977,8 +1018,8 @@
Cc: bar
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/x-patch; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Content-Disposition: inline; filename=t2-1.patch
@@ -1015,8 +1056,8 @@
Cc: bar
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/x-patch; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Content-Disposition: inline; filename=t2-2.patch
@@ -1053,8 +1094,8 @@
Cc: bar
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/x-patch; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: quoted-printable
Content-Disposition: inline; filename=t2-3.patch
@@ -1111,8 +1152,8 @@
Cc: bar
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Patch subject is complete summary.
@@ -1120,8 +1161,8 @@
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/x-patch; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Content-Disposition: attachment; filename=t2.patch
@@ -1162,8 +1203,8 @@
Cc: bar
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Patch subject is complete summary.
@@ -1171,8 +1212,8 @@
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/x-patch; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: quoted-printable
Content-Disposition: attachment; filename=t2.patch
@@ -1229,8 +1270,8 @@
Cc: bar
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
# HG changeset patch
@@ -1248,8 +1289,8 @@
+c
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/x-patch; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Content-Disposition: attachment; filename=t2.patch
@@ -1279,8 +1320,8 @@
displaying [PATCH 0 of 3] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 0 of 3] test
Message-Id: <patchbomb.60@*> (glob)
@@ -1309,8 +1350,8 @@
Cc: bar
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Patch subject is complete summary.
@@ -1318,8 +1359,8 @@
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/x-patch; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Content-Disposition: attachment; filename=t2-1.patch
@@ -1356,8 +1397,8 @@
Cc: bar
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Patch subject is complete summary.
@@ -1365,8 +1406,8 @@
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/x-patch; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Content-Disposition: attachment; filename=t2-2.patch
@@ -1403,8 +1444,8 @@
Cc: bar
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Patch subject is complete summary.
@@ -1412,8 +1453,8 @@
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/x-patch; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: quoted-printable
Content-Disposition: attachment; filename=t2-3.patch
@@ -1459,8 +1500,8 @@
displaying [PATCH 0 of 1] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 0 of 1] test
Message-Id: <patchbomb.60@*> (glob)
@@ -1472,8 +1513,8 @@
displaying [PATCH 1 of 1] c ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 1 of 1] c
X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f
@@ -1512,8 +1553,8 @@
displaying [PATCH 0 of 1] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 0 of 1] test
Message-Id: <patchbomb.60@*> (glob)
@@ -1526,8 +1567,8 @@
foo
displaying [PATCH 1 of 1] c ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 1 of 1] c
X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f
@@ -1568,8 +1609,8 @@
displaying [PATCH 0 of 2] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 0 of 2] test
Message-Id: <patchbomb.60@*> (glob)
@@ -1581,8 +1622,8 @@
displaying [PATCH 1 of 2] a ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 1 of 2] a
X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab
@@ -1613,8 +1654,8 @@
+a
displaying [PATCH 2 of 2] b ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 2 of 2] b
X-Mercurial-Node: 97d72e5f12c7e84f85064aa72e5a297142c36ed9
@@ -1652,8 +1693,8 @@
displaying [PATCH] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH] test
X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f
@@ -1690,8 +1731,8 @@
displaying [PATCH] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH] test
X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f
@@ -1748,8 +1789,8 @@
Cc: bar
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/x-patch; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Content-Disposition: inline; filename=two.diff
@@ -1779,8 +1820,8 @@
displaying [PATCH 0 of 2] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 0 of 2] test
Message-Id: <patchbomb.60@*> (glob)
@@ -1809,8 +1850,8 @@
Cc: bar
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/x-patch; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Content-Disposition: inline; filename=t2-1.patch
@@ -1847,8 +1888,8 @@
Cc: bar
--===*= (glob)
+ MIME-Version: 1.0
Content-Type: text/x-patch; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Content-Disposition: inline; filename=one.patch
@@ -1876,8 +1917,8 @@
displaying [PATCH] Added tag two, two.diff for changeset ff2c9fa2018b ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH] Added tag two, two.diff for changeset ff2c9fa2018b
X-Mercurial-Node: 7aead2484924c445ad8ce2613df91f52f9e502ed
@@ -1919,8 +1960,8 @@
(optional) Subject: [PATCH 0 of 2]
displaying [PATCH 1 of 2] a ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 1 of 2] a
X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab
@@ -1951,8 +1992,8 @@
+a
displaying [PATCH 2 of 2] b ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 2 of 2] b
X-Mercurial-Node: 97d72e5f12c7e84f85064aa72e5a297142c36ed9
@@ -1994,8 +2035,8 @@
displaying [PATCH 0 of 2] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 0 of 2] test
Message-Id: <patchbomb.60@*> (glob)
@@ -2009,8 +2050,8 @@
displaying [PATCH 1 of 2] a ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 1 of 2] a
X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab
@@ -2041,8 +2082,8 @@
+a
displaying [PATCH 2 of 2] b ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 2 of 2] b
X-Mercurial-Node: 97d72e5f12c7e84f85064aa72e5a297142c36ed9
@@ -2082,8 +2123,8 @@
displaying [PATCH fooFlag] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH fooFlag] test
X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f
@@ -2123,8 +2164,8 @@
displaying [PATCH 0 of 2 fooFlag] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 0 of 2 fooFlag] test
Message-Id: <patchbomb.60@*> (glob)
@@ -2136,8 +2177,8 @@
displaying [PATCH 1 of 2 fooFlag] a ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 1 of 2 fooFlag] a
X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab
@@ -2168,8 +2209,8 @@
+a
displaying [PATCH 2 of 2 fooFlag] b ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 2 of 2 fooFlag] b
X-Mercurial-Node: 97d72e5f12c7e84f85064aa72e5a297142c36ed9
@@ -2209,8 +2250,8 @@
displaying [PATCH fooFlag barFlag] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH fooFlag barFlag] test
X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f
@@ -2249,8 +2290,8 @@
displaying [PATCH 0 of 2 fooFlag barFlag] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 0 of 2 fooFlag barFlag] test
Message-Id: <patchbomb.60@*> (glob)
@@ -2262,8 +2303,8 @@
displaying [PATCH 1 of 2 fooFlag barFlag] a ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 1 of 2 fooFlag barFlag] a
X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab
@@ -2294,8 +2335,8 @@
+a
displaying [PATCH 2 of 2 fooFlag barFlag] b ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 2 of 2 fooFlag barFlag] b
X-Mercurial-Node: 97d72e5f12c7e84f85064aa72e5a297142c36ed9
@@ -2336,8 +2377,8 @@
sending [PATCH] test ...
$ cat < tmp.mbox
From quux ... ... .. ..:..:.. .... (re)
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH] test
X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab
@@ -2378,8 +2419,8 @@
Cc:
displaying [PATCH 0 of 2 R1] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 0 of 2 R1] test
Message-Id: <patchbomb.60@*> (glob)
@@ -2391,8 +2432,8 @@
foo
displaying [PATCH 1 of 2 R0] a ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 1 of 2 R0] a
X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab
@@ -2422,8 +2463,8 @@
+a
displaying [PATCH 2 of 2 R1] b ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 2 of 2 R1] b
X-Mercurial-Node: 97d72e5f12c7e84f85064aa72e5a297142c36ed9
@@ -2461,8 +2502,8 @@
Cc:
displaying [PATCH default V2] a ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH default V2] a
X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab
@@ -2503,8 +2544,8 @@
$ cat tmp.mbox
From quux ... ... .. ..:..:.. .... (re)
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH] test
X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab
@@ -2581,8 +2622,8 @@
Cc:
displaying [PATCH 0 of 6] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 0 of 6] test
Message-Id: <patchbomb.315532860@*> (glob)
@@ -2593,8 +2634,8 @@
displaying [PATCH 1 of 6] c ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 1 of 6] c
X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f
@@ -2624,8 +2665,8 @@
+c
displaying [PATCH 2 of 6] utf-8 content ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 8bit
Subject: [PATCH 2 of 6] utf-8 content
X-Mercurial-Node: 909a00e13e9d78b575aeee23dddbada46d5a143f
@@ -2662,8 +2703,8 @@
+h\xc3\xb6mma! (esc)
displaying [PATCH 3 of 6] long line ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: quoted-printable
Subject: [PATCH 3 of 6] long line
X-Mercurial-Node: a2ea8fc83dd8b93cfd86ac97b28287204ab806e1
@@ -2709,8 +2750,8 @@
+bar
displaying [PATCH 4 of 6] isolatin 8-bit encoding ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 8bit
Subject: [PATCH 4 of 6] isolatin 8-bit encoding
X-Mercurial-Node: 240fb913fc1b7ff15ddb9f33e73d82bf5277c720
@@ -2740,8 +2781,8 @@
+h\xf6mma! (esc)
displaying [PATCH 5 of 6] Added tag zero, zero.foo for changeset 8580ff50825a ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 5 of 6] Added tag zero, zero.foo for changeset 8580ff50825a
X-Mercurial-Node: 5d5ef15dfe5e7bd3a4ee154b5fff76c7945ec433
@@ -2772,8 +2813,8 @@
+8580ff50825a50c8f716709acdf8de0deddcd6ab zero.foo
displaying [PATCH 6 of 6] d ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH 6 of 6] d
X-Mercurial-Node: 2f9fa9b998c5fe3ac2bd9a2b14bfcbeecbc7c268
@@ -2817,8 +2858,8 @@
displaying [PATCH] test ...
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH] test
X-Mercurial-Node: 2f9fa9b998c5fe3ac2bd9a2b14bfcbeecbc7c268
@@ -2952,8 +2993,8 @@
warning: invalid patchbomb.intro value "mpmwearaclownnose"
(should be one of always, never, auto)
-f test foo
+ MIME-Version: 1.0
Content-Type: text/plain; charset="us-ascii"
- MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [PATCH] test
X-Mercurial-Node: 3b6f1ec9dde933a40a115a7990f8b320477231af
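Note on the test-patchbomb.t hunks above: each one swaps the expected order of the MIME-Version and Content-Type headers. Python's email.message.Message keeps headers in insertion order, so the order the test sees simply mirrors the order in which the message is assembled. The sketch below only demonstrates that stdlib behaviour; it is not Mercurial's mail-building code.

    # Illustration only: emitted header order follows the order in which
    # headers are added to the Message object.
    from email.message import Message

    msg = Message()
    msg['MIME-Version'] = '1.0'
    msg['Content-Type'] = 'text/plain; charset="us-ascii"'
    msg['Content-Transfer-Encoding'] = '7bit'
    msg.set_payload('patch body\n')
    print(msg.keys())
    # ['MIME-Version', 'Content-Type', 'Content-Transfer-Encoding']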
--- a/tests/test-pathencode.py Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-pathencode.py Sat Sep 30 07:52:48 2017 -0700
@@ -19,6 +19,11 @@
store,
)
+try:
+ xrange
+except NameError:
+ xrange = range
+
validchars = set(map(chr, range(0, 256)))
alphanum = range(ord('A'), ord('Z'))
@@ -183,7 +188,7 @@
if o in ('-c', '--count'):
count = int(a)
elif o in ('-s', '--seed'):
- seed = long(a, base=0) # accepts base 10 or 16 strings
+ seed = int(a, base=0) # accepts base 10 or 16 strings
elif o == '--build':
buildprobtable(sys.stdout,
'find .hg/store/data -type f && '
@@ -192,9 +197,9 @@
if seed is None:
try:
- seed = long(binascii.hexlify(os.urandom(16)), 16)
+ seed = int(binascii.hexlify(os.urandom(16)), 16)
except AttributeError:
- seed = long(time.time() * 1000)
+ seed = int(time.time() * 1000)
rng = random.Random(seed)
if runtests(rng, seed, count):
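The test-pathencode.py hunk keeps the script Python 3 compatible: xrange falls back to range, and long(a, base=0) becomes int(a, base=0). Passing base=0 preserves the old "base 10 or 16 strings" behaviour because the radix is inferred from the literal's prefix. A quick illustration (plain Python, nothing Mercurial-specific):

    # base=0 infers the radix from the prefix, so decimal and hex seeds
    # both keep working after the long() -> int() switch.
    print(int('123', 0))    # 123 (decimal)
    print(int('0x7b', 0))   # 123 (hexadecimal)
    print(int('0o173', 0))  # 123 (octal; the 0o prefix is required here)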
--- a/tests/test-phases.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-phases.t Sat Sep 30 07:52:48 2017 -0700
@@ -588,7 +588,7 @@
(enabling evolution)
$ cat >> $HGRCPATH << EOF
> [experimental]
- > evolution=createmarkers
+ > stabilization=createmarkers
> EOF
(making a changeset hidden; H in that case)
--- a/tests/test-progress.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-progress.t Sat Sep 30 07:52:48 2017 -0700
@@ -1,7 +1,8 @@
$ cat > loop.py <<EOF
+ > from __future__ import absolute_import
+ > import time
> from mercurial import commands, registrar
- > import time
>
> cmdtable = {}
> command = registrar.command(cmdtable)
@@ -184,25 +185,9 @@
#if no-chg
- $ cat > mocktime.py <<EOF
- > import os
- > import time
- >
- > class mocktime(object):
- > def __init__(self, increment):
- > self.time = 0
- > self.increment = increment
- > def __call__(self):
- > self.time += self.increment
- > return self.time
- >
- > def uisetup(ui):
- > time.time = mocktime(int(os.environ.get('MOCKTIME', '11')))
- > EOF
-
$ cp $HGRCPATH.orig $HGRCPATH
$ echo "[extensions]" >> $HGRCPATH
- $ echo "mocktime=`pwd`/mocktime.py" >> $HGRCPATH
+ $ echo "mocktime=$TESTDIR/mocktime.py" >> $HGRCPATH
$ echo "progress=" >> $HGRCPATH
$ echo "loop=`pwd`/loop.py" >> $HGRCPATH
$ echo "[progress]" >> $HGRCPATH
@@ -210,7 +195,7 @@
$ echo "delay=25" >> $HGRCPATH
$ echo "width=60" >> $HGRCPATH
- $ hg -y loop 8
+ $ MOCKTIME=11 hg -y loop 8
\r (no-eol) (esc)
loop [=========> ] 2/8 1m07s\r (no-eol) (esc)
loop [===============> ] 3/8 56s\r (no-eol) (esc)
@@ -245,8 +230,33 @@
loop [=============================> ] 3/4 23w02d\r (no-eol) (esc)
\r (no-eol) (esc)
+Non-linear progress:
+
+ $ MOCKTIME='20 20 20 20 20 20 20 20 20 20 500 500 500 500 500 20 20 20 20 20' hg -y loop 20
+ \r (no-eol) (esc)
+ loop [=> ] 1/20 6m21s\r (no-eol) (esc)
+ loop [===> ] 2/20 6m01s\r (no-eol) (esc)
+ loop [=====> ] 3/20 5m41s\r (no-eol) (esc)
+ loop [=======> ] 4/20 5m21s\r (no-eol) (esc)
+ loop [=========> ] 5/20 5m01s\r (no-eol) (esc)
+ loop [===========> ] 6/20 4m41s\r (no-eol) (esc)
+ loop [=============> ] 7/20 4m21s\r (no-eol) (esc)
+ loop [===============> ] 8/20 4m01s\r (no-eol) (esc)
+ loop [================> ] 9/20 25m40s\r (no-eol) (esc)
+ loop [===================> ] 10/20 1h06m\r (no-eol) (esc)
+ loop [=====================> ] 11/20 1h13m\r (no-eol) (esc)
+ loop [=======================> ] 12/20 1h07m\r (no-eol) (esc)
+ loop [========================> ] 13/20 58m19s\r (no-eol) (esc)
+ loop [===========================> ] 14/20 7m09s\r (no-eol) (esc)
+ loop [=============================> ] 15/20 3m38s\r (no-eol) (esc)
+ loop [===============================> ] 16/20 2m15s\r (no-eol) (esc)
+ loop [=================================> ] 17/20 1m27s\r (no-eol) (esc)
+ loop [====================================> ] 18/20 52s\r (no-eol) (esc)
+ loop [======================================> ] 19/20 25s\r (no-eol) (esc)
+ \r (no-eol) (esc)
+
Time estimates should not fail when there's no end point:
- $ hg -y loop -- -4
+ $ MOCKTIME=11 hg -y loop -- -4
\r (no-eol) (esc)
loop [ <=> ] 2\r (no-eol) (esc)
loop [ <=> ] 3\r (no-eol) (esc)
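test-progress.t now drops its inline mocktime.py and loads a shared $TESTDIR/mocktime.py instead, passing the fake clock increments through the MOCKTIME environment variable (a single value, or a space-separated list for the non-linear run). The shared extension itself is not part of this hunk; the following is only a sketch, assuming it behaves like the removed inline version extended to accept a list of increments.

    # Sketch of a MOCKTIME-driven fake clock; the real $TESTDIR/mocktime.py
    # may differ. Each time.time() call advances by the next increment, and
    # the last increment repeats once the list is exhausted.
    from __future__ import absolute_import
    import os
    import time

    class mocktime(object):
        def __init__(self, increments):
            self.time = 0
            self.increments = increments
            self.pos = 0

        def __call__(self):
            self.time += self.increments[min(self.pos, len(self.increments) - 1)]
            self.pos += 1
            return self.time

    def uisetup(ui):
        # MOCKTIME is either a single value like "11" or a list like "20 20 500 ...".
        parts = os.environ.get('MOCKTIME', '11').split()
        time.time = mocktime([int(p) for p in parts])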
--- a/tests/test-push-http.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-push-http.t Sat Sep 30 07:52:48 2017 -0700
@@ -172,4 +172,20 @@
% serve errors
[255]
+ $ cat > .hg/hgrc <<EOF
+ > [web]
+ > push_ssl = false
+ > allow_push = *
+ > [experimental]
+ > httppostargs=true
+ > EOF
+ $ req
+ pushing to http://localhost:$HGPORT/
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+ % serve errors
+
$ cd ..

--- a/tests/test-push-race.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-push-race.t Sat Sep 30 07:52:48 2017 -0700
@@ -98,7 +98,7 @@
> [phases]
> publish = no
> [experimental]
- > evolution = all
+ > stabilization = all
> [alias]
> graph = log -G --rev 'sort(all(), "topo")'
> EOF
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-pushvars.t Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,71 @@
+Setup
+
+ $ PYTHONPATH=$TESTDIR/..:$PYTHONPATH
+ $ export PYTHONPATH
+
+ $ cat > $TESTTMP/pretxnchangegroup.sh << EOF
+ > #!/bin/sh
+ > env | egrep "^HG_USERVAR_(DEBUG|BYPASS_REVIEW)" | sort
+ > exit 0
+ > EOF
+ $ cat >> $HGRCPATH << EOF
+ > [hooks]
+ > pretxnchangegroup = sh $TESTTMP/pretxnchangegroup.sh
+ > [experimental]
+ > bundle2-exp = true
+ > EOF
+
+ $ hg init repo
+ $ hg clone -q repo child
+ $ cd child
+
+Test pushing vars to repo with pushvars.server not set
+
+ $ echo b > a
+ $ hg commit -Aqm a
+ $ hg push --pushvars "DEBUG=1" --pushvars "BYPASS_REVIEW=true"
+ pushing to $TESTTMP/repo (glob)
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+
+Setting pushvars.server = true and then pushing.
+
+ $ echo [push] >> $HGRCPATH
+ $ echo "pushvars.server = true" >> $HGRCPATH
+ $ echo b >> a
+ $ hg commit -Aqm a
+ $ hg push --pushvars "DEBUG=1" --pushvars "BYPASS_REVIEW=true"
+ pushing to $TESTTMP/repo (glob)
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ HG_USERVAR_BYPASS_REVIEW=true
+ HG_USERVAR_DEBUG=1
+
+Test pushing var with empty right-hand side
+
+ $ echo b >> a
+ $ hg commit -Aqm a
+ $ hg push --pushvars "DEBUG="
+ pushing to $TESTTMP/repo (glob)
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ HG_USERVAR_DEBUG=
+
+Test pushing bad vars
+
+ $ echo b >> a
+ $ hg commit -Aqm b
+ $ hg push --pushvars "DEBUG"
+ pushing to $TESTTMP/repo (glob)
+ searching for changes
+ abort: unable to parse variable 'DEBUG', should follow 'KEY=VALUE' or 'KEY=' format
+ [255]
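The new test-pushvars.t exercises hg push --pushvars KEY=VALUE: once push.pushvars.server is enabled on the receiving repository, each variable reaches external server-side hooks as an HG_USERVAR_* environment variable, which the shell hook above simply greps for. For reference, the same external hook could be a Python script; the sketch below is illustrative only and reuses the variable names from this test.

    #!/usr/bin/env python
    # Python equivalent of the shell hook above: an *external* hook run by
    # the shell, so the pushvars arrive as HG_USERVAR_* environment variables.
    from __future__ import absolute_import, print_function
    import os
    import sys

    def main():
        for key in ('HG_USERVAR_BYPASS_REVIEW', 'HG_USERVAR_DEBUG'):
            if key in os.environ:
                print('%s=%s' % (key, os.environ[key]))
        return 0  # a non-zero exit would reject the incoming changegroup

    if __name__ == '__main__':
        sys.exit(main())

It would be wired up the same way as the shell version, for example pretxnchangegroup = python "$TESTTMP/pretxnchangegroup.py" (path hypothetical).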
--- a/tests/test-qrecord.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-qrecord.t Sat Sep 30 07:52:48 2017 -0700
@@ -79,6 +79,7 @@
-w --ignore-all-space ignore white space when comparing lines
-b --ignore-space-change ignore changes in the amount of white space
-B --ignore-blank-lines ignore changes whose lines are all blank
+ -Z --ignore-space-at-eol ignore changes in whitespace at EOL
(some details hidden, use --verbose to show complete help)
@@ -152,6 +153,7 @@
-w --ignore-all-space ignore white space when comparing lines
-b --ignore-space-change ignore changes in the amount of white space
-B --ignore-blank-lines ignore changes whose lines are all blank
+ -Z --ignore-space-at-eol ignore changes in whitespace at EOL
--mq operate on patch repository
(some details hidden, use --verbose to show complete help)
--- a/tests/test-rebase-abort.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-rebase-abort.t Sat Sep 30 07:52:48 2017 -0700
@@ -266,7 +266,7 @@
$ hg rebase -d master -r foo
- rebasing 3:6c0f977a22d8 "C" (tip foo)
+ rebasing 3:6c0f977a22d8 "C" (foo tip)
merging c
warning: conflicts while merging c! (edit, then use 'hg resolve --mark')
unresolved conflicts (see hg resolve, then hg rebase --continue)
@@ -306,7 +306,7 @@
created new head
$ hg rebase -d @ -b foo --tool=internal:fail
- rebasing 2:070cf4580bb5 "b2" (tip foo)
+ rebasing 2:070cf4580bb5 "b2" (foo tip)
unresolved conflicts (see hg resolve, then hg rebase --continue)
[1]
--- a/tests/test-rebase-base.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-rebase-base.t Sat Sep 30 07:52:48 2017 -0700
@@ -379,3 +379,40 @@
/
o 0: A
+Rebasing using a single transaction
+
+ $ hg init singletr && cd singletr
+ $ cat >> .hg/hgrc <<EOF
+ > [rebase]
+ > singletransaction=True
+ > EOF
+ $ hg debugdrawdag <<'EOF'
+ > Z
+ > |
+ > | D
+ > | |
+ > | C
+ > | |
+ > Y B
+ > |/
+ > A
+ > EOF
+- We should only see two status stored messages. One from the start, one from
+- the end.
+ $ hg rebase --debug -b D -d Z | grep 'status stored'
+ rebase status stored
+ rebase status stored
+ $ hg tglog
+ o 5: D
+ |
+ o 4: C
+ |
+ o 3: B
+ |
+ o 2: Z
+ |
+ o 1: Y
+ |
+ o 0: A
+
+ $ cd ..
--- a/tests/test-rebase-bookmarks.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-rebase-bookmarks.t Sat Sep 30 07:52:48 2017 -0700
@@ -97,7 +97,7 @@
$ hg book W@diverge
$ hg rebase -s W -d .
- rebasing 3:41acb9dca9eb "D" (tip W)
+ rebasing 3:41acb9dca9eb "D" (W tip)
saved backup bundle to $TESTTMP/a4/.hg/strip-backup/41acb9dca9eb-b35a6a63-rebase.hg (glob)
$ hg bookmarks
@@ -209,7 +209,7 @@
$ hg rebase -r '"bisect"^^::"bisect"^' -r bisect -d Z
rebasing 5:345c90f326a4 "bisect"
rebasing 6:f677a2907404 "bisect2"
- rebasing 7:325c16001345 "bisect3" (tip bisect)
+ rebasing 7:325c16001345 "bisect3" (bisect tip)
saved backup bundle to $TESTTMP/a3/.hg/strip-backup/345c90f326a4-b4840586-rebase.hg (glob)
Bookmark and working parent get moved even if --keep is set (issue5682)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-brute-force.t Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,55 @@
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > drawdag=$TESTDIR/drawdag.py
+ > bruterebase=$TESTDIR/bruterebase.py
+ > [experimental]
+ > evolution=createmarkers,allowunstable
+ > EOF
+ $ init() {
+ > N=`expr ${N:-0} + 1`
+ > cd $TESTTMP && hg init repo$N && cd repo$N
+ > hg debugdrawdag
+ > }
+
+Source looks like "N"
+
+ $ init <<'EOS'
+ > C D
+ > |\|
+ > A B Z
+ > EOS
+
+ $ hg debugbruterebase 'all()-Z' Z
+ A: A':Z
+ B: B':Z
+ AB: A':Z B':Z
+ C: ABORT: cannot rebase 3:a35c07e8a2a4 without moving at least one of its parents
+ AC: A':Z C':A'B
+ BC: B':Z C':B'A
+ ABC: A':Z B':Z C':A'B'
+ D: D':Z
+ AD: A':Z D':Z
+ BD: B':Z D':B'
+ ABD: A':Z B':Z D':B'
+ CD: ABORT: cannot rebase 3:a35c07e8a2a4 without moving at least one of its parents
+ ACD: A':Z C':A'B D':Z
+ BCD: B':Z C':B'A D':B'
+ ABCD: A':Z B':Z C':A'B' D':B'
+
+Moving backwards
+
+ $ init <<'EOS'
+ > C
+ > |\
+ > A B
+ > |
+ > Z
+ > EOS
+ $ hg debugbruterebase 'all()-Z' Z
+ B: B':Z
+ A:
+ BA: B':Z
+ C: ABORT: cannot rebase 3:b8d7149b562b without moving at least one of its parents
+ BC: B':Z C':B'A
+ AC:
+ BAC: B':Z C':B'A
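The debugbruterebase output above comes from the test-only bruterebase.py extension, which tries rebasing every non-empty subset of the selected revisions onto the destination and reports the result per subset, aborting any subset that would leave a merge with none of its parents moved. The row order (A, B, AB, C, AC, BC, ABC, D, ...) is consistent with a binary-counter enumeration of subsets; whether the extension iterates exactly this way is an assumption, but the idea is easy to sketch:

    # Enumerate every non-empty subset of a revision set in binary-counter
    # order, matching the row order in the expected output above.
    def subsets(revs):
        for mask in range(1, 1 << len(revs)):
            yield [r for i, r in enumerate(revs) if mask & (1 << i)]

    for combo in subsets(['A', 'B', 'C', 'D']):
        print(''.join(combo))
    # A B AB C AC BC ABC D AD BD ABD CD ACD BCD ABCD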
--- a/tests/test-rebase-collapse.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-rebase-collapse.t Sat Sep 30 07:52:48 2017 -0700
@@ -792,7 +792,7 @@
$ hg book foo
$ hg rebase -d 0 -r "1::2" --collapse -m collapsed
rebasing 1:6d8d9f24eec3 "a"
- rebasing 2:1cc73eca5ecc "b" (tip foo)
+ rebasing 2:1cc73eca5ecc "b" (foo tip)
saved backup bundle to $TESTTMP/collapseaddremove/.hg/strip-backup/6d8d9f24eec3-77d3b6e2-rebase.hg (glob)
$ hg log -G --template "{rev}: '{desc}' {bookmarks}"
@ 1: 'collapsed' foo
--- a/tests/test-rebase-conflicts.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-rebase-conflicts.t Sat Sep 30 07:52:48 2017 -0700
@@ -71,6 +71,21 @@
unresolved conflicts (see hg resolve, then hg rebase --continue)
[1]
+ $ hg status --config commands.status.verbose=1
+ M common
+ ? common.orig
+ # The repository is in an unfinished *rebase* state.
+
+ # Unresolved merge conflicts:
+ #
+ # common
+ #
+ # To mark files as resolved: hg resolve --mark FILE
+
+ # To continue: hg rebase --continue
+ # To abort: hg rebase --abort
+
+
Try to continue without solving the conflict:
$ hg rebase --continue
@@ -220,10 +235,6 @@
$ hg rebase -s9 -d2 --debug # use debug to really check merge base used
rebase onto 4bc80088dc6b starting from e31216eec445
rebase status stored
- ignoring null merge rebase of 3
- ignoring null merge rebase of 4
- ignoring null merge rebase of 6
- ignoring null merge rebase of 8
rebasing 9:e31216eec445 "more changes to f1"
future parents are 2 and -1
rebase status stored
@@ -385,7 +396,7 @@
$ hg update E -q
$ echo 3 > B
$ hg commit --amend -m E -A B -q
- $ hg rebase -r B+D -d . --config experimental.evolution=all
+ $ hg rebase -r B+D -d . --config experimental.stabilization=all
rebasing 1:112478962961 "B" (B)
merging B
warning: conflicts while merging B! (edit, then use 'hg resolve --mark')
@@ -396,9 +407,8 @@
$ hg resolve -m
(no more unresolved files)
continue: hg rebase --continue
- $ hg rebase --continue --config experimental.evolution=none
+ $ hg rebase --continue --config experimental.stabilization=none
rebasing 1:112478962961 "B" (B)
- not rebasing ignored 2:26805aba1e60 "C" (C)
rebasing 3:f585351a92f8 "D" (D)
warning: orphaned descendants detected, not stripping 112478962961
saved backup bundle to $TESTTMP/b/.hg/strip-backup/f585351a92f8-e536a9e4-rebase.hg (glob)
--- a/tests/test-rebase-dest.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-rebase-dest.t Sat Sep 30 07:52:48 2017 -0700
@@ -76,3 +76,378 @@
(use hg pull followed by hg rebase -d DEST)
[255]
+Setup rebase with multiple destinations
+
+ $ cd $TESTTMP
+
+ $ cat >> $TESTTMP/maprevset.py <<EOF
+ > from __future__ import absolute_import
+ > from mercurial import registrar, revset, revsetlang, smartset
+ > revsetpredicate = registrar.revsetpredicate()
+ > cache = {}
+ > @revsetpredicate('map')
+ > def map(repo, subset, x):
+ > """(set, mapping)"""
+ > setarg, maparg = revsetlang.getargs(x, 2, 2, '')
+ > rset = revset.getset(repo, smartset.fullreposet(repo), setarg)
+ > mapstr = revsetlang.getstring(maparg, '')
+ > map = dict(a.split(':') for a in mapstr.split(','))
+ > rev = rset.first()
+ > desc = repo[rev].description()
+ > newdesc = map.get(desc)
+ > if newdesc == 'null':
+ > revs = [-1]
+ > else:
+ > query = revsetlang.formatspec('desc(%s)', newdesc)
+ > revs = repo.revs(query)
+ > return smartset.baseset(revs)
+ > EOF
+
+ $ cat >> $HGRCPATH <<EOF
+ > [ui]
+ > allowemptycommit=1
+ > [extensions]
+ > drawdag=$TESTDIR/drawdag.py
+ > [phases]
+ > publish=False
+ > [alias]
+ > tglog = log -G --template "{rev}: {desc} {instabilities}" -r 'sort(all(), topo)'
+ > [extensions]
+ > maprevset=$TESTTMP/maprevset.py
+ > [experimental]
+ > rebase.multidest=true
+ > stabilization=all
+ > EOF
+
+ $ rebasewithdag() {
+ > N=`$PYTHON -c "print($N+1)"`
+ > hg init repo$N && cd repo$N
+ > hg debugdrawdag
+ > hg rebase "$@" > _rebasetmp
+ > r=$?
+ > grep -v 'saved backup bundle' _rebasetmp
+ > [ $r -eq 0 ] && rm -f .hg/localtags && hg tglog
+ > cd ..
+ > return $r
+ > }
+
+Destination resolves to an empty set:
+
+ $ rebasewithdag -s B -d 'SRC - SRC' <<'EOS'
+ > C
+ > |
+ > B
+ > |
+ > A
+ > EOS
+ nothing to rebase - empty destination
+ [1]
+
+Multiple destinations and --collapse are not compatible:
+
+ $ rebasewithdag -s C+E -d 'SRC^^' --collapse <<'EOS'
+ > C F
+ > | |
+ > B E
+ > | |
+ > A D
+ > EOS
+ abort: --collapse does not work with multiple destinations
+ [255]
+
+Multiple destinations cannot be used with --base:
+
+ $ rebasewithdag -b B+E -d 'SRC^^' --collapse <<'EOS'
+ > B E
+ > | |
+ > A D
+ > EOS
+ abort: unknown revision 'SRC'!
+ [255]
+
+Rebase to null should work:
+
+ $ rebasewithdag -r A+C+D -d 'null' <<'EOS'
+ > C D
+ > | |
+ > A B
+ > EOS
+ already rebased 0:426bada5c675 "A" (A)
+ already rebased 2:dc0947a82db8 "C" (C)
+ rebasing 3:004dc1679908 "D" (D tip)
+ o 4: D
+
+ o 2: C
+ |
+ | o 1: B
+ |
+ o 0: A
+
+Destination resolves to multiple changesets:
+
+ $ rebasewithdag -s B -d 'ALLSRC+SRC' <<'EOS'
+ > C
+ > |
+ > B
+ > |
+ > Z
+ > EOS
+ abort: rebase destination for f0a671a46792 is not unique
+ [255]
+
+Destination is an ancestor of source:
+
+ $ rebasewithdag -s B -d 'SRC' <<'EOS'
+ > C
+ > |
+ > B
+ > |
+ > Z
+ > EOS
+ abort: source and destination form a cycle
+ [255]
+
+Switch roots:
+
+ $ rebasewithdag -s 'all() - roots(all())' -d 'roots(all()) - ::SRC' <<'EOS'
+ > C F
+ > | |
+ > B E
+ > | |
+ > A D
+ > EOS
+ rebasing 2:112478962961 "B" (B)
+ rebasing 4:26805aba1e60 "C" (C)
+ rebasing 3:cd488e83d208 "E" (E)
+ rebasing 5:0069ba24938a "F" (F tip)
+ o 9: F
+ |
+ o 8: E
+ |
+ | o 7: C
+ | |
+ | o 6: B
+ | |
+ | o 1: D
+ |
+ o 0: A
+
+Different destinations for merge changesets with a same root:
+
+ $ rebasewithdag -s B -d '((parents(SRC)-B-A)::) - (::ALLSRC)' <<'EOS'
+ > C G
+ > |\|
+ > | F
+ > |
+ > B E
+ > |\|
+ > A D
+ > EOS
+ rebasing 3:a4256619d830 "B" (B)
+ rebasing 6:8e139e245220 "C" (C tip)
+ o 8: C
+ |\
+ | o 7: B
+ | |\
+ o | | 5: G
+ | | |
+ | | o 4: E
+ | | |
+ o | | 2: F
+ / /
+ | o 1: D
+ |
+ o 0: A
+
+Move to a previous parent:
+
+ $ rebasewithdag -s E+F+G -d 'SRC^^' <<'EOS'
+ > H
+ > |
+ > D G
+ > |/
+ > C F
+ > |/
+ > B E # E will be ignored, since E^^ is empty
+ > |/
+ > A
+ > EOS
+ rebasing 4:33441538d4aa "F" (F)
+ rebasing 6:cf43ad9da869 "G" (G)
+ rebasing 7:eef94f3b5f03 "H" (H tip)
+ o 10: H
+ |
+ | o 5: D
+ |/
+ o 3: C
+ |
+ | o 9: G
+ |/
+ o 1: B
+ |
+ | o 8: F
+ |/
+ | o 2: E
+ |/
+ o 0: A
+
+Source overlaps with destination:
+
+ $ rebasewithdag -s 'B+C+D' -d 'map(SRC, "B:C,C:D")' <<'EOS'
+ > B C D
+ > \|/
+ > A
+ > EOS
+ rebasing 2:dc0947a82db8 "C" (C)
+ rebasing 1:112478962961 "B" (B)
+ o 5: B
+ |
+ o 4: C
+ |
+ o 3: D
+ |
+ o 0: A
+
+Detect cycles early:
+
+ $ rebasewithdag -r 'all()-Z' -d 'map(SRC, "A:B,B:C,C:D,D:B")' <<'EOS'
+ > A B C
+ > \|/
+ > | D
+ > |/
+ > Z
+ > EOS
+ abort: source and destination form a cycle
+ [255]
+
+Detect source is ancestor of dest in runtime:
+
+ $ rebasewithdag -r 'C+B' -d 'map(SRC, "C:B,B:D")' -q <<'EOS'
+ > D
+ > |
+ > B C
+ > \|
+ > A
+ > EOS
+ abort: source is ancestor of destination
+ [255]
+
+"Already rebased" fast path still works:
+
+ $ rebasewithdag -r 'all()' -d 'SRC^' <<'EOS'
+ > E F
+ > /| |
+ > B C D
+ > \|/
+ > A
+ > EOS
+ already rebased 1:112478962961 "B" (B)
+ already rebased 2:dc0947a82db8 "C" (C)
+ already rebased 3:b18e25de2cf5 "D" (D)
+ already rebased 4:312782b8f06e "E" (E)
+ already rebased 5:ad6717a6a58e "F" (F tip)
+ o 5: F
+ |
+ o 3: D
+ |
+ | o 4: E
+ | |\
+ +---o 2: C
+ | |
+ | o 1: B
+ |/
+ o 0: A
+
+Massively rewrite the DAG:
+
+ $ rebasewithdag -r 'all()' -d 'map(SRC, "A:I,I:null,H:A,B:J,J:C,C:H,D:E,F:G,G:K,K:D,E:B")' <<'EOS'
+ > D G K
+ > | | |
+ > C F J
+ > | | |
+ > B E I
+ > \| |
+ > A H
+ > EOS
+ rebasing 4:701514e1408d "I" (I)
+ rebasing 0:426bada5c675 "A" (A)
+ rebasing 1:e7050b6e5048 "H" (H)
+ rebasing 5:26805aba1e60 "C" (C)
+ rebasing 7:cf89f86b485b "J" (J)
+ rebasing 2:112478962961 "B" (B)
+ rebasing 3:7fb047a69f22 "E" (E)
+ rebasing 8:f585351a92f8 "D" (D)
+ rebasing 10:ae41898d7875 "K" (K tip)
+ rebasing 9:711f53bbef0b "G" (G)
+ rebasing 6:64a8289d2492 "F" (F)
+ o 21: F
+ |
+ o 20: G
+ |
+ o 19: K
+ |
+ o 18: D
+ |
+ o 17: E
+ |
+ o 16: B
+ |
+ o 15: J
+ |
+ o 14: C
+ |
+ o 13: H
+ |
+ o 12: A
+ |
+ o 11: I
+
+Resolve instability:
+
+ $ rebasewithdag <<'EOF' -r 'orphan()-obsolete()' -d 'max((successors(max(roots(ALLSRC) & ::SRC)^)-obsolete())::)'
+ > F2
+ > |
+ > J E E2
+ > | |/
+ > I2 I | E3
+ > \| |/
+ > H | G
+ > | | |
+ > B2 D F
+ > | |/ # rebase: B -> B2
+ > N C # amend: E -> E2
+ > | | # amend: E2 -> E3
+ > M B # rebase: F -> F2
+ > \| # amend: I -> I2
+ > A
+ > EOF
+ rebasing 16:5c432343bf59 "J" (J tip)
+ rebasing 3:26805aba1e60 "C" (C)
+ rebasing 6:f585351a92f8 "D" (D)
+ rebasing 10:ffebc37c5d0b "E3" (E3)
+ rebasing 13:fb184bcfeee8 "F2" (F2)
+ rebasing 11:dc838ab4c0da "G" (G)
+ o 22: G
+ |
+ o 21: F2
+ |
+ o 20: E3
+ |
+ o 19: D
+ |
+ o 18: C
+ |
+ o 17: J
+ |
+ o 15: I2
+ |
+ o 12: H
+ |
+ o 5: B2
+ |
+ o 4: N
+ |
+ o 2: M
+ |
+ o 0: A
+
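Several of the multiple-destination cases above abort with "source and destination form a cycle": the per-source destination mapping is rejected whenever following it would lead a revision back onto one of its own sources, as in map(SRC, "A:B,B:C,C:D,D:B"). A minimal, Mercurial-independent sketch of that kind of check:

    # Illustrative cycle check over a source -> destination mapping such as
    # "A:B,B:C,C:D,D:B"; not the actual rebase implementation.
    def hascycle(mapping):
        for start in mapping:
            seen = set()
            node = start
            while node in mapping:
                if node in seen:
                    return True
                seen.add(node)
                node = mapping[node]
        return False

    print(hascycle(dict(p.split(':') for p in 'A:B,B:C,C:D,D:B'.split(','))))  # True
    print(hascycle(dict(p.split(':') for p in 'B:C,C:D'.split(','))))          # False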
--- a/tests/test-rebase-emptycommit.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-rebase-emptycommit.t Sat Sep 30 07:52:48 2017 -0700
@@ -82,12 +82,12 @@
"BOOK-D", and "BOOK-E" include changes introduced by "C".
$ hg rebase -s 2 -d E
- rebasing 2:dc0947a82db8 "C" (C BOOK-C)
+ rebasing 2:dc0947a82db8 "C" (BOOK-C C)
rebasing 3:e7b3f00ed42e "D" (BOOK-D)
note: rebase of 3:e7b3f00ed42e created no changes to commit
rebasing 4:69a34c08022a "E" (BOOK-E)
note: rebase of 4:69a34c08022a created no changes to commit
- rebasing 5:6b2aeab91270 "F" (F BOOK-F)
+ rebasing 5:6b2aeab91270 "F" (BOOK-F F)
saved backup bundle to $TESTTMP/non-merge/.hg/strip-backup/dc0947a82db8-52bb4973-rebase.hg (glob)
$ hg log -G -T '{rev} {desc} {bookmarks}'
o 5 F BOOK-F
@@ -134,7 +134,7 @@
note: rebase of 2:dc0947a82db8 created no changes to commit
rebasing 3:b18e25de2cf5 "D" (BOOK-D)
note: rebase of 3:b18e25de2cf5 created no changes to commit
- rebasing 4:86a1f6686812 "E" (E BOOK-E)
+ rebasing 4:86a1f6686812 "E" (BOOK-E E)
note: rebase of 4:86a1f6686812 created no changes to commit
saved backup bundle to $TESTTMP/merge1/.hg/strip-backup/b18e25de2cf5-1fd0a4ba-rebase.hg (glob)
@@ -181,11 +181,11 @@
$ hg rebase -r '(A::)-(B::)-A' -d H
rebasing 2:dc0947a82db8 "C" (BOOK-C)
note: rebase of 2:dc0947a82db8 created no changes to commit
- rebasing 3:b18e25de2cf5 "D" (D BOOK-D)
- rebasing 4:03ca77807e91 "E" (E BOOK-E)
+ rebasing 3:b18e25de2cf5 "D" (BOOK-D D)
+ rebasing 4:03ca77807e91 "E" (BOOK-E E)
rebasing 5:ad6717a6a58e "F" (BOOK-F)
note: rebase of 5:ad6717a6a58e created no changes to commit
- rebasing 6:c58e8bdac1f4 "G" (G BOOK-G)
+ rebasing 6:c58e8bdac1f4 "G" (BOOK-G G)
saved backup bundle to $TESTTMP/merge2/.hg/strip-backup/b18e25de2cf5-2d487005-rebase.hg (glob)
$ hg log -G -T '{rev} {desc} {bookmarks}'
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-legacy.t Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,76 @@
+Test rebase --continue with rebasestate written by legacy client
+
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > rebase=
+ > drawdag=$TESTDIR/drawdag.py
+ > EOF
+
+ $ hg init
+ $ hg debugdrawdag <<'EOF'
+ > D H
+ > | |
+ > C G
+ > | |
+ > B F
+ > | |
+ > Z A E
+ > \|/
+ > R
+ > EOF
+
+rebasestate generated by a legacy client running "hg rebase -r B+D+E+G+H -d Z"
+
+ $ touch .hg/last-message.txt
+ $ cat > .hg/rebasestate <<EOF
+ > 0000000000000000000000000000000000000000
+ > f424eb6a8c01c4a0c0fba9f863f79b3eb5b4b69f
+ > 0000000000000000000000000000000000000000
+ > 0
+ > 0
+ > 0
+ >
+ > 21a6c45028857f500f56ae84fbf40689c429305b:-2
+ > de008c61a447fcfd93f808ef527d933a84048ce7:0000000000000000000000000000000000000000
+ > c1e6b162678d07d0b204e5c8267d51b4e03b633c:0000000000000000000000000000000000000000
+ > aeba276fcb7df8e10153a07ee728d5540693f5aa:-3
+ > bd5548558fcf354d37613005737a143871bf3723:-3
+ > d2fa1c02b2401b0e32867f26cce50818a4bd796a:0000000000000000000000000000000000000000
+ > 6f7a236de6852570cd54649ab62b1012bb78abc8:0000000000000000000000000000000000000000
+ > 6582e6951a9c48c236f746f186378e36f59f4928:0000000000000000000000000000000000000000
+ > EOF
+
+ $ hg rebase --continue
+ rebasing 4:c1e6b162678d "B" (B)
+ rebasing 8:6f7a236de685 "D" (D)
+ rebasing 2:de008c61a447 "E" (E)
+ rebasing 7:d2fa1c02b240 "G" (G)
+ rebasing 9:6582e6951a9c "H" (H tip)
+ warning: orphaned descendants detected, not stripping c1e6b162678d, de008c61a447
+ saved backup bundle to $TESTTMP/.hg/strip-backup/6f7a236de685-9880a3dc-rebase.hg (glob)
+
+ $ hg log -G -T '{rev}:{node|short} {desc}\n'
+ o 11:721b8da0a708 H
+ |
+ o 10:9d65695ec3c2 G
+ |
+ o 9:21c8397a5d68 E
+ |
+ | o 8:fc52970345e8 D
+ | |
+ | o 7:eac96551b107 B
+ |/
+ | o 6:bd5548558fcf C
+ | |
+ | | o 5:aeba276fcb7d F
+ | | |
+ | o | 4:c1e6b162678d B
+ | | |
+ o | | 3:f424eb6a8c01 Z
+ | | |
+ +---o 2:de008c61a447 E
+ | |
+ | o 1:21a6c4502885 A
+ |/
+ o 0:b41ce7760717 R
+
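The handcrafted .hg/rebasestate in test-rebase-legacy.t mimics what an older client would leave behind mid-rebase: a handful of header lines (working directory node, destination, external parent, the collapse/keep/keepbranches flags and an activebookmark line) followed by one oldnode:value entry per revision, where the value is either the new node, the null hash for "not rebased yet", or a small negative status such as -2 or -3. That reading is inferred from this test rather than from a format reference; a rough reader under that assumption:

    # Rough reader for the legacy rebasestate layout shown above; the exact
    # meaning of each header line is an assumption based on this test.
    def readlegacyrebasestate(path):
        with open(path) as f:
            lines = [l.rstrip('\n') for l in f]
        header, entries = lines[:7], lines[7:]
        state = {}
        for entry in entries:
            oldnode, value = entry.split(':')
            state[oldnode] = value  # new node hash, null hash, or -2 / -3
        return header, state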
--- a/tests/test-rebase-named-branches.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-rebase-named-branches.t Sat Sep 30 07:52:48 2017 -0700
@@ -245,7 +245,7 @@
@ 0: 'A'
$ hg rebase -s 5 -d 6
- abort: source is ancestor of destination
+ abort: source and destination form a cycle
[255]
$ hg rebase -s 6 -d 5
--- a/tests/test-rebase-newancestor.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-rebase-newancestor.t Sat Sep 30 07:52:48 2017 -0700
@@ -3,7 +3,7 @@
> usegeneraldelta=yes
> [extensions]
> rebase=
- >
+ > drawdag=$TESTDIR/drawdag.py
> [alias]
> tglog = log -G --template "{rev}: '{desc}' {branches}\n"
> EOF
@@ -334,3 +334,93 @@
|/
o 0: 'common'
+Due to a limitation of the 3-way merge algorithm (a single merge base), rebasing
+a merge may include unwanted content:
+
+ $ hg init $TESTTMP/dual-merge-base1
+ $ cd $TESTTMP/dual-merge-base1
+ $ hg debugdrawdag <<'EOS'
+ > F
+ > /|
+ > D E
+ > | |
+ > B C
+ > |/
+ > A Z
+ > |/
+ > R
+ > EOS
+ $ hg rebase -r D+E+F -d Z
+ rebasing 5:5f2c926dfecf "D" (D)
+ rebasing 6:b296604d9846 "E" (E)
+ rebasing 7:caa9781e507d "F" (F tip)
+ abort: rebasing 7:caa9781e507d will include unwanted changes from 4:d6003a550c2c or 3:c1e6b162678d
+ [255]
+
+The warning does not get printed if there is no unwanted change detected:
+
+ $ hg init $TESTTMP/dual-merge-base2
+ $ cd $TESTTMP/dual-merge-base2
+ $ hg debugdrawdag <<'EOS'
+ > D
+ > /|
+ > B C
+ > |/
+ > A Z
+ > |/
+ > R
+ > EOS
+ $ hg rebase -r B+C+D -d Z
+ rebasing 3:c1e6b162678d "B" (B)
+ rebasing 4:d6003a550c2c "C" (C)
+ rebasing 5:c8f78076273e "D" (D tip)
+ saved backup bundle to $TESTTMP/dual-merge-base2/.hg/strip-backup/d6003a550c2c-6f1424b6-rebase.hg (glob)
+ $ hg manifest -r 'desc(D)'
+ B
+ C
+ R
+ Z
+
+The merge base could be different from old p1 (changed parent becomes new p1):
+
+ $ hg init $TESTTMP/chosen-merge-base1
+ $ cd $TESTTMP/chosen-merge-base1
+ $ hg debugdrawdag <<'EOS'
+ > F
+ > /|
+ > D E
+ > | |
+ > B C Z
+ > EOS
+ $ hg rebase -r D+F -d Z
+ rebasing 3:004dc1679908 "D" (D)
+ rebasing 5:4be4cbf6f206 "F" (F tip)
+ saved backup bundle to $TESTTMP/chosen-merge-base1/.hg/strip-backup/004dc1679908-06a66a3c-rebase.hg (glob)
+ $ hg manifest -r 'desc(F)'
+ C
+ D
+ E
+ Z
+ $ hg log -r `hg log -r 'desc(F)' -T '{p1node}'` -T '{desc}\n'
+ D
+
+ $ hg init $TESTTMP/chosen-merge-base2
+ $ cd $TESTTMP/chosen-merge-base2
+ $ hg debugdrawdag <<'EOS'
+ > F
+ > /|
+ > D E
+ > | |
+ > B C Z
+ > EOS
+ $ hg rebase -r E+F -d Z
+ rebasing 4:974e4943c210 "E" (E)
+ rebasing 5:4be4cbf6f206 "F" (F tip)
+ saved backup bundle to $TESTTMP/chosen-merge-base2/.hg/strip-backup/974e4943c210-b2874da5-rebase.hg (glob)
+ $ hg manifest -r 'desc(F)'
+ B
+ D
+ E
+ Z
+ $ hg log -r `hg log -r 'desc(F)' -T '{p1node}'` -T '{desc}\n'
+ E
--- a/tests/test-rebase-obsolete.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-rebase-obsolete.t Sat Sep 30 07:52:48 2017 -0700
@@ -8,11 +8,12 @@
> [ui]
> logtemplate= {rev}:{node|short} {desc|firstline}
> [experimental]
- > evolution=createmarkers,allowunstable
+ > stabilization=createmarkers,allowunstable
> [phases]
> publish=False
> [extensions]
> rebase=
+ > drawdag=$TESTDIR/drawdag.py
> EOF
Setup rebase canonical repo
@@ -100,9 +101,9 @@
o 0:cd010b8cd998 A
$ hg debugobsolete
- 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 e4e5be0395b2cbd471ed22a26b1b6a1a0658a794 0 (*) {'user': 'test'} (glob)
- 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 2327fea05063f39961b14cb69435a9898dc9a245 0 (*) {'user': 'test'} (glob)
- 32af7686d403cf45b5d95f2d70cebea587ac806a 8eeb3c33ad33d452c89e5dcf611c347f978fb42b 0 (*) {'user': 'test'} (glob)
+ 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 e4e5be0395b2cbd471ed22a26b1b6a1a0658a794 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 2327fea05063f39961b14cb69435a9898dc9a245 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 32af7686d403cf45b5d95f2d70cebea587ac806a 8eeb3c33ad33d452c89e5dcf611c347f978fb42b 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
$ cd ..
@@ -170,9 +171,9 @@
o 0:cd010b8cd998 A
$ hg debugobsolete
- 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (*) {'user': 'test'} (glob)
- 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (*) {'user': 'test'} (glob)
- 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (*) {'user': 'test'} (glob)
+ 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
More complex case where part of the rebase set were already rebased
@@ -180,10 +181,10 @@
$ hg rebase --rev 'desc(D)' --dest 'desc(H)'
rebasing 9:08483444fef9 "D"
$ hg debugobsolete
- 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (*) {'user': 'test'} (glob)
- 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (*) {'user': 'test'} (glob)
- 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (*) {'user': 'test'} (glob)
- 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (*) {'user': 'test'} (glob)
+ 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
$ hg log -G
@ 11:4596109a6a43 D
|
@@ -205,16 +206,16 @@
$ hg rebase --source 'desc(B)' --dest 'tip' --config experimental.rebaseskipobsolete=True
rebasing 8:8877864f1edb "B"
- note: not rebasing 9:08483444fef9 "D", already in destination as 11:4596109a6a43 "D"
+ note: not rebasing 9:08483444fef9 "D", already in destination as 11:4596109a6a43 "D" (tip)
rebasing 10:5ae4c968c6ac "C"
$ hg debugobsolete
- 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (*) {'user': 'test'} (glob)
- 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (*) {'user': 'test'} (glob)
- 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (*) {'user': 'test'} (glob)
- 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (*) {'user': 'test'} (glob)
- 8877864f1edb05d0e07dc4ba77b67a80a7b86672 462a34d07e599b87ea08676a449373fe4e2e1347 0 (*) {'user': 'test'} (glob)
- 5ae4c968c6aca831df823664e706c9d4aa34473d 98f6af4ee9539e14da4465128f894c274900b6e5 0 (*) {'user': 'test'} (glob)
- $ hg log --rev 'divergent()'
+ 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 8877864f1edb05d0e07dc4ba77b67a80a7b86672 462a34d07e599b87ea08676a449373fe4e2e1347 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 5ae4c968c6aca831df823664e706c9d4aa34473d 98f6af4ee9539e14da4465128f894c274900b6e5 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ $ hg log --rev 'contentdivergent()'
$ hg log -G
o 13:98f6af4ee953 C
|
@@ -349,9 +350,9 @@
$ hg id --debug -r tip
4dc2197e807bae9817f09905b50ab288be2dbbcf tip
$ hg debugobsolete
- 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (*) {'user': 'test'} (glob)
- 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (*) {'user': 'test'} (glob)
- 32af7686d403cf45b5d95f2d70cebea587ac806a 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (*) {'user': 'test'} (glob)
+ 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 32af7686d403cf45b5d95f2d70cebea587ac806a 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
$ cd ..
@@ -411,9 +412,9 @@
o 0:cd010b8cd998 A
$ hg debugobsolete
- 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b e273c5e7d2d29df783dce9f9eaa3ac4adc69c15d 0 (*) {'user': 'test'} (glob)
- 32af7686d403cf45b5d95f2d70cebea587ac806a cf44d2f5a9f4297a62be94cbdd3dff7c7dc54258 0 (*) {'user': 'test'} (glob)
- 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 7c6027df6a99d93f461868e5433f63bde20b6dfb 0 (*) {'user': 'test'} (glob)
+ 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b e273c5e7d2d29df783dce9f9eaa3ac4adc69c15d 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 32af7686d403cf45b5d95f2d70cebea587ac806a cf44d2f5a9f4297a62be94cbdd3dff7c7dc54258 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
+ 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 7c6027df6a99d93f461868e5433f63bde20b6dfb 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
Test that rewriting leaving instability behind is allowed
---------------------------------------------------------------------
@@ -449,7 +450,6 @@
$ hg rebase --dest 4 --rev '7+11+9'
rebasing 9:cf44d2f5a9f4 "D"
rebasing 7:02de42196ebe "H"
- not rebasing ignored 10:7c6027df6a99 "B"
rebasing 11:0d8f238b634c "C" (tip)
$ hg log -G
o 14:1e8370e38cca C
@@ -472,6 +472,30 @@
$ cd ..
+Detach both parents
+
+ $ hg init double-detach
+ $ cd double-detach
+
+ $ hg debugdrawdag <<EOF
+ > F
+ > /|
+ > C E
+ > | |
+ > B D G
+ > \|/
+ > A
+ > EOF
+
+ $ hg rebase -d G -r 'B + D + F'
+ rebasing 1:112478962961 "B" (B)
+ rebasing 2:b18e25de2cf5 "D" (D)
+ rebasing 6:f15c3adaf214 "F" (F tip)
+ abort: cannot rebase 6:f15c3adaf214 without moving at least one of its parents
+ [255]
+
+ $ cd ..
+
test on rebase dropping a merge
(setup)
@@ -519,7 +543,6 @@
$ hg rebase --dest 6 --rev '((desc(H) + desc(D))::) - desc(M)'
rebasing 3:32af7686d403 "D"
rebasing 7:02de42196ebe "H"
- not rebasing ignored 8:53a6a128b2b7 "M"
rebasing 9:4bde274eefcf "I" (tip)
$ hg log -G
@ 12:acd174b7ab39 I
@@ -603,11 +626,11 @@
$ hg add M
$ hg commit --amend -m "M"
$ hg log -G
- @ 20:bfaedf8eb73b M
+ @ 18:bfaedf8eb73b M
|
- | o 18:97219452e4bd L
+ | o 17:97219452e4bd L
| |
- | x 17:fc37a630c901 K
+ | x 16:fc37a630c901 K
|/
| o 15:5ae8a643467b J
| |
@@ -637,8 +660,8 @@
|/
o 0:cd010b8cd998 A
- $ hg rebase -s 14 -d 18 --config experimental.rebaseskipobsolete=True
- note: not rebasing 14:9ad579b4a5de "I", already in destination as 17:fc37a630c901 "K"
+ $ hg rebase -s 14 -d 17 --config experimental.rebaseskipobsolete=True
+ note: not rebasing 14:9ad579b4a5de "I", already in destination as 16:fc37a630c901 "K"
rebasing 15:5ae8a643467b "J"
$ cd ..
@@ -710,7 +733,7 @@
|
o 0:4a2df7238c3b A
- $ hg debugobsolete `hg log -r 7 -T '{node}\n'` --config experimental.evolution=all
+ $ hg debugobsolete `hg log -r 7 -T '{node}\n'` --config experimental.stabilization=all
obsoleted 1 changesets
$ hg rebase -d 6 -r "4::"
rebasing 4:ff2c4d47b71d "C"
@@ -738,7 +761,7 @@
$ hg add nonrelevant
$ hg commit -m nonrelevant
created new head
- $ hg debugobsolete `hg log -r 11 -T '{node}\n'` --config experimental.evolution=all
+ $ hg debugobsolete `hg log -r 11 -T '{node}\n'` --config experimental.stabilization=all
obsoleted 1 changesets
$ hg rebase -r . -d 10
note: not rebasing 11:f44da1f4954c "nonrelevant" (tip), it has no successor
@@ -746,10 +769,8 @@
If a rebase is going to create divergence, it should abort
$ hg log -G
- @ 11:f44da1f4954c nonrelevant
+ @ 10:121d9e3bc4c6 P
|
- | o 10:121d9e3bc4c6 P
- |/
o 9:4be60e099a77 C
|
o 6:9c48361117de D
@@ -776,9 +797,9 @@
$ hg add foo
$ hg commit -m "bar foo"
$ hg log -G
- @ 15:73568ab6879d bar foo
+ @ 14:73568ab6879d bar foo
|
- | o 14:77d874d096a2 10'
+ | o 13:77d874d096a2 10'
| |
| | o 12:3eb461388009 john doe
| |/
@@ -793,21 +814,21 @@
o 0:4a2df7238c3b A
$ hg summary
- parent: 15:73568ab6879d tip (unstable)
+ parent: 14:73568ab6879d tip (orphan)
bar foo
branch: default
commit: (clean)
update: 2 new changesets, 3 branch heads (merge)
phases: 8 draft
- unstable: 1 changesets
+ orphan: 1 changesets
$ hg rebase -s 10 -d 12
abort: this rebase will cause divergences from: 121d9e3bc4c6
(to force the rebase please set experimental.allowdivergence=True)
[255]
$ hg log -G
- @ 15:73568ab6879d bar foo
+ @ 14:73568ab6879d bar foo
|
- | o 14:77d874d096a2 10'
+ | o 13:77d874d096a2 10'
| |
| | o 12:3eb461388009 john doe
| |/
@@ -825,21 +846,21 @@
$ hg rebase -s 10 -d 12 --config experimental.allowdivergence=True
rebasing 10:121d9e3bc4c6 "P"
- rebasing 15:73568ab6879d "bar foo" (tip)
+ rebasing 14:73568ab6879d "bar foo" (tip)
$ hg summary
- parent: 17:61bd55f69bc4 tip
+ parent: 16:61bd55f69bc4 tip
bar foo
branch: default
commit: (clean)
update: 1 new changesets, 2 branch heads (merge)
phases: 8 draft
- divergent: 2 changesets
+ content-divergent: 2 changesets
rebase --continue + skipped rev because their successors are in destination
we make a change in trunk and work on conflicting changes to make rebase abort.
- $ hg log -G -r 17::
- @ 17:61bd55f69bc4 bar foo
+ $ hg log -G -r 16::
+ @ 16:61bd55f69bc4 bar foo
|
~
@@ -852,7 +873,7 @@
$ hg commit -m "dummy change successor"
Create the changes that we will rebase
- $ hg update -C 17 -q
+ $ hg update -C 16 -q
$ printf "b" > willconflict
$ hg add willconflict
$ hg commit -m "willconflict second version"
@@ -863,25 +884,25 @@
$ printf "dummy" > L
$ hg add L
$ hg commit -m "dummy change"
- $ hg debugobsolete `hg log -r ".^" -T '{node}'` `hg log -r 19 -T '{node}'` --config experimental.evolution=all
+ $ hg debugobsolete `hg log -r ".^" -T '{node}'` `hg log -r 18 -T '{node}'` --config experimental.stabilization=all
obsoleted 1 changesets
- $ hg log -G -r 17::
- @ 22:7bdc8a87673d dummy change
+ $ hg log -G -r 16::
+ @ 21:7bdc8a87673d dummy change
|
- x 21:8b31da3c4919 dummy change
+ x 20:8b31da3c4919 dummy change
|
- o 20:b82fb57ea638 willconflict second version
+ o 19:b82fb57ea638 willconflict second version
|
- | o 19:601db7a18f51 dummy change successor
+ | o 18:601db7a18f51 dummy change successor
| |
- | o 18:357ddf1602d5 willconflict first version
+ | o 17:357ddf1602d5 willconflict first version
|/
- o 17:61bd55f69bc4 bar foo
+ o 16:61bd55f69bc4 bar foo
|
~
- $ hg rebase -r ".^^ + .^ + ." -d 19
- rebasing 20:b82fb57ea638 "willconflict second version"
+ $ hg rebase -r ".^^ + .^ + ." -d 18
+ rebasing 19:b82fb57ea638 "willconflict second version"
merging willconflict
warning: conflicts while merging willconflict! (edit, then use 'hg resolve --mark')
unresolved conflicts (see hg resolve, then hg rebase --continue)
@@ -891,68 +912,298 @@
(no more unresolved files)
continue: hg rebase --continue
$ hg rebase --continue
- rebasing 20:b82fb57ea638 "willconflict second version"
- note: not rebasing 21:8b31da3c4919 "dummy change", already in destination as 19:601db7a18f51 "dummy change successor"
- rebasing 22:7bdc8a87673d "dummy change" (tip)
+ rebasing 19:b82fb57ea638 "willconflict second version"
+ note: not rebasing 20:8b31da3c4919 "dummy change", already in destination as 18:601db7a18f51 "dummy change successor"
+ rebasing 21:7bdc8a87673d "dummy change" (tip)
+ $ cd ..
+
+Rebase merge where successor of one parent is equal to destination (issue5198)
+
+ $ hg init p1-succ-is-dest
+ $ cd p1-succ-is-dest
+
+ $ hg debugdrawdag <<EOF
+ > F
+ > /|
+ > E D B # replace: D -> B
+ > \|/
+ > A
+ > EOF
+
+ $ hg rebase -d B -s D
+ note: not rebasing 2:b18e25de2cf5 "D" (D), already in destination as 1:112478962961 "B" (B)
+ rebasing 4:66f1a38021c9 "F" (F tip)
+ $ hg log -G
+ o 5:50e9d60b99c6 F
+ |\
+ | | x 4:66f1a38021c9 F
+ | |/|
+ | o | 3:7fb047a69f22 E
+ | | |
+ | | x 2:b18e25de2cf5 D
+ | |/
+ o | 1:112478962961 B
+ |/
+ o 0:426bada5c675 A
+
+ $ cd ..
+
+Rebase merge where successor of other parent is equal to destination
+
+ $ hg init p2-succ-is-dest
+ $ cd p2-succ-is-dest
+
+ $ hg debugdrawdag <<EOF
+ > F
+ > /|
+ > E D B # replace: E -> B
+ > \|/
+ > A
+ > EOF
+
+ $ hg rebase -d B -s E
+ note: not rebasing 3:7fb047a69f22 "E" (E), already in destination as 1:112478962961 "B" (B)
+ rebasing 4:66f1a38021c9 "F" (F tip)
+ $ hg log -G
+ o 5:aae1787dacee F
+ |\
+ | | x 4:66f1a38021c9 F
+ | |/|
+ | | x 3:7fb047a69f22 E
+ | | |
+ | o | 2:b18e25de2cf5 D
+ | |/
+ o / 1:112478962961 B
+ |/
+ o 0:426bada5c675 A
+
+ $ cd ..
+
+Rebase merge where successor of one parent is ancestor of destination
+
+ $ hg init p1-succ-in-dest
+ $ cd p1-succ-in-dest
+
+ $ hg debugdrawdag <<EOF
+ > F C
+ > /| |
+ > E D B # replace: D -> B
+ > \|/
+ > A
+ > EOF
+
+ $ hg rebase -d C -s D
+ note: not rebasing 2:b18e25de2cf5 "D" (D), already in destination as 1:112478962961 "B" (B)
+ rebasing 5:66f1a38021c9 "F" (F tip)
+
+ $ hg log -G
+ o 6:0913febf6439 F
+ |\
+ +---x 5:66f1a38021c9 F
+ | | |
+ | o | 4:26805aba1e60 C
+ | | |
+ o | | 3:7fb047a69f22 E
+ | | |
+ +---x 2:b18e25de2cf5 D
+ | |
+ | o 1:112478962961 B
+ |/
+ o 0:426bada5c675 A
+
+ $ cd ..
+
+Rebase merge where successor of other parent is ancestor of destination
+
+ $ hg init p2-succ-in-dest
+ $ cd p2-succ-in-dest
+
+ $ hg debugdrawdag <<EOF
+ > F C
+ > /| |
+ > E D B # replace: E -> B
+ > \|/
+ > A
+ > EOF
+
+ $ hg rebase -d C -s E
+ note: not rebasing 3:7fb047a69f22 "E" (E), already in destination as 1:112478962961 "B" (B)
+ rebasing 5:66f1a38021c9 "F" (F tip)
+ $ hg log -G
+ o 6:c6ab0cc6d220 F
+ |\
+ +---x 5:66f1a38021c9 F
+ | | |
+ | o | 4:26805aba1e60 C
+ | | |
+ | | x 3:7fb047a69f22 E
+ | | |
+ o---+ 2:b18e25de2cf5 D
+ / /
+ o / 1:112478962961 B
+ |/
+ o 0:426bada5c675 A
+
$ cd ..
-rebase source is obsoleted (issue5198)
----------------------------------
+Rebase merge where successor of one parent is ancestor of destination
+
+ $ hg init p1-succ-in-dest-b
+ $ cd p1-succ-in-dest-b
+
+ $ hg debugdrawdag <<EOF
+ > F C
+ > /| |
+ > E D B # replace: E -> B
+ > \|/
+ > A
+ > EOF
- $ hg clone base amended
- updating to branch default
- 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ cd amended
- $ hg up 9520eea781bc
- 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
- $ echo 1 >> E
- $ hg commit --amend -m "E'" -d "0 0"
+ $ hg rebase -d C -b F
+ rebasing 2:b18e25de2cf5 "D" (D)
+ note: not rebasing 3:7fb047a69f22 "E" (E), already in destination as 1:112478962961 "B" (B)
+ rebasing 5:66f1a38021c9 "F" (F tip)
+ note: rebase of 5:66f1a38021c9 created no changes to commit
$ hg log -G
- @ 9:69abe8906104 E'
+ o 6:8f47515dda15 D
|
- | o 7:02de42196ebe H
- | |
- | | o 6:eea13746799a G
- | |/|
- | o | 5:24b6387c8c8c F
- |/ /
- | x 4:9520eea781bc E
+ | x 5:66f1a38021c9 F
+ | |\
+ o | | 4:26805aba1e60 C
+ | | |
+ | | x 3:7fb047a69f22 E
+ | | |
+ | x | 2:b18e25de2cf5 D
+ | |/
+ o / 1:112478962961 B
+ |/
+ o 0:426bada5c675 A
+
+ $ cd ..
+
+Rebase merge where successor of other parent is ancestor of destination
+
+ $ hg init p2-succ-in-dest-b
+ $ cd p2-succ-in-dest-b
+
+ $ hg debugdrawdag <<EOF
+ > F C
+ > /| |
+ > E D B # replace: D -> B
+ > \|/
+ > A
+ > EOF
+
+ $ hg rebase -d C -b F
+ note: not rebasing 2:b18e25de2cf5 "D" (D), already in destination as 1:112478962961 "B" (B)
+ rebasing 3:7fb047a69f22 "E" (E)
+ rebasing 5:66f1a38021c9 "F" (F tip)
+ note: rebase of 5:66f1a38021c9 created no changes to commit
+
+ $ hg log -G
+ o 6:533690786a86 E
+ |
+ | x 5:66f1a38021c9 F
+ | |\
+ o | | 4:26805aba1e60 C
+ | | |
+ | | x 3:7fb047a69f22 E
+ | | |
+ | x | 2:b18e25de2cf5 D
+ | |/
+ o / 1:112478962961 B
|/
- | o 3:32af7686d403 D
- | |
- | o 2:5fddd98957c8 C
- | |
- | o 1:42ccdea3bb16 B
- |/
- o 0:cd010b8cd998 A
+ o 0:426bada5c675 A
- $ hg rebase -d . -s 9520eea781bc
- note: not rebasing 4:9520eea781bc "E", already in destination as 9:69abe8906104 "E'"
- rebasing 6:eea13746799a "G"
+ $ cd ..
+
+Rebase merge where both parents have successors in destination
+
+ $ hg init p12-succ-in-dest
+ $ cd p12-succ-in-dest
+ $ hg debugdrawdag <<'EOS'
+ > E F
+ > /| /| # replace: A -> C
+ > A B C D # replace: B -> D
+ > | |
+ > X Y
+ > EOS
+ $ hg rebase -r A+B+E -d F
+ note: not rebasing 4:a3d17304151f "A" (A), already in destination as 0:96cc3511f894 "C" (C)
+ note: not rebasing 5:b23a2cc00842 "B" (B), already in destination as 1:058c1e1fb10a "D" (D)
+ rebasing 7:dac5d11c5a7d "E" (E tip)
+ abort: rebasing 7:dac5d11c5a7d will include unwanted changes from 3:59c792af609c, 5:b23a2cc00842 or 2:ba2b7fa7166d, 4:a3d17304151f
+ [255]
+ $ cd ..
+
+Rebase a non-clean merge. One parent has successor in destination, the other
+parent moves as requested.
+
+ $ hg init p1-succ-p2-move
+ $ cd p1-succ-p2-move
+ $ hg debugdrawdag <<'EOS'
+ > D Z
+ > /| | # replace: A -> C
+ > A B C # D/D = D
+ > EOS
+ $ hg rebase -r A+B+D -d Z
+ note: not rebasing 0:426bada5c675 "A" (A), already in destination as 2:96cc3511f894 "C" (C)
+ rebasing 1:fc2b737bb2e5 "B" (B)
+ rebasing 3:b8ed089c80ad "D" (D)
+
+ $ rm .hg/localtags
$ hg log -G
- o 10:17be06e82e95 G
- |\
- | @ 9:69abe8906104 E'
- | |
- +---o 7:02de42196ebe H
- | |
- o | 5:24b6387c8c8c F
- |/
- | o 3:32af7686d403 D
- | |
- | o 2:5fddd98957c8 C
- | |
- | o 1:42ccdea3bb16 B
- |/
- o 0:cd010b8cd998 A
+ o 6:e4f78693cc88 D
+ |
+ o 5:76840d832e98 B
+ |
+ o 4:50e41c1f3950 Z
+ |
+ o 2:96cc3511f894 C
+ $ hg files -r tip
+ B
+ C
+ D
+ Z
+
+ $ cd ..
+
+ $ hg init p1-move-p2-succ
+ $ cd p1-move-p2-succ
+ $ hg debugdrawdag <<'EOS'
+ > D Z
+ > /| | # replace: B -> C
+ > A B C # D/D = D
+ > EOS
+ $ hg rebase -r B+A+D -d Z
+ rebasing 0:426bada5c675 "A" (A)
+ note: not rebasing 1:fc2b737bb2e5 "B" (B), already in destination as 2:96cc3511f894 "C" (C)
+ rebasing 3:b8ed089c80ad "D" (D)
+
+ $ rm .hg/localtags
+ $ hg log -G
+ o 6:1b355ed94d82 D
+ |
+ o 5:a81a74d764a6 A
+ |
+ o 4:50e41c1f3950 Z
+ |
+ o 2:96cc3511f894 C
+
+ $ hg files -r tip
+ A
+ C
+ D
+ Z
+
$ cd ..
Test that bookmark is moved and working dir is updated when all changesets have
equivalents in destination
$ hg init rbsrepo && cd rbsrepo
$ echo "[experimental]" > .hg/hgrc
- $ echo "evolution=all" >> .hg/hgrc
+ $ echo "stabilization=all" >> .hg/hgrc
$ echo "rebaseskipobsolete=on" >> .hg/hgrc
$ echo root > root && hg ci -Am root
adding root
@@ -971,19 +1222,152 @@
$ hg up 2 && hg log -r . # working dir is at rev 2 again
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
2:1e9a3c00cbe9 b (no-eol)
- $ hg rebase -r 2 -d 3 --config experimental.evolution.track-operation=1
- note: not rebasing 2:1e9a3c00cbe9 "b" (mybook), already in destination as 3:be1832deae9a "b"
-Check that working directory was updated to rev 3 although rev 2 was skipped
-during the rebase operation
+ $ hg rebase -r 2 -d 3 --config experimental.stabilization.track-operation=1
+ note: not rebasing 2:1e9a3c00cbe9 "b" (mybook), already in destination as 3:be1832deae9a "b" (tip)
+Check that the working directory and bookmark were updated to rev 3 although
+rev 2 was skipped
$ hg log -r .
3:be1832deae9a b (no-eol)
+ $ hg bookmarks
+ mybook 3:be1832deae9a
+ $ hg debugobsolete --rev tip
+ 1e9a3c00cbe90d236ac05ef61efcc5e40b7412bc be1832deae9ac531caa7438b8dcf6055a122cd8e 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'rebase', 'user': 'test'}
-Check that bookmark was not moved to rev 3 if rev 2 was skipped during the
-rebase operation. This makes sense because if rev 2 has a successor, the
-operation generating that successor (ex. rebase) should be responsible for
-moving bookmarks. If the bookmark is on a precursor, like rev 2, that means the
-user manually moved it back. In that case we should not move it again.
- $ hg bookmarks
- mybook 2:1e9a3c00cbe9
- $ hg debugobsolete --rev tip
- 1e9a3c00cbe90d236ac05ef61efcc5e40b7412bc be1832deae9ac531caa7438b8dcf6055a122cd8e 0 (*) {'user': 'test'} (glob)
+An obsoleted working-directory parent and its bookmark can be moved if an
+ancestor of the working parent gets moved:
+
+ $ hg init $TESTTMP/ancestor-wd-move
+ $ cd $TESTTMP/ancestor-wd-move
+ $ hg debugdrawdag <<'EOS'
+ > E D1 # rebase: D1 -> D2
+ > | |
+ > | C
+ > D2 |
+ > | B
+ > |/
+ > A
+ > EOS
+ $ hg update D1 -q
+ $ hg bookmark book -i
+ $ hg rebase -r B+D1 -d E
+ rebasing 1:112478962961 "B" (B)
+ note: not rebasing 5:15ecf15e0114 "D1" (book D1 tip), already in destination as 2:0807738e0be9 "D2" (D2)
+ $ hg log -G -T '{desc} {bookmarks}'
+ @ B book
+ |
+ | x D1
+ | |
+ o | E
+ | |
+ | o C
+ | |
+ o | D2
+ | |
+ | x B
+ |/
+ o A
+
+Rebasing a merge with one of its parents having a hidden successor
+
+ $ hg init $TESTTMP/merge-p1-hidden-successor
+ $ cd $TESTTMP/merge-p1-hidden-successor
+
+ $ hg debugdrawdag <<'EOS'
+ > E
+ > |
+ > B3 B2 # amend: B1 -> B2 -> B3
+ > |/ # B2 is hidden
+ > | D
+ > | |\
+ > | B1 C
+ > |/
+ > A
+ > EOS
+
+ $ eval `hg tags -T '{tag}={node}\n'`
+ $ rm .hg/localtags
+
+ $ hg rebase -r $D -d $E
+ rebasing 5:9e62094e4d94 "D"
+
+ $ hg log -G
+ o 7:a699d059adcf D
+ |\
+ | o 6:ecc93090a95c E
+ | |
+ | o 4:0dc878468a23 B3
+ | |
+ o | 1:96cc3511f894 C
+ /
+ o 0:426bada5c675 A
+
+For various reasons (--hidden, rebaseskipobsolete=0, directaccess, etc.),
+rebasestate may contain hidden hashes. "rebase --abort" should work regardless.
+
+ $ hg init $TESTTMP/hidden-state1
+ $ cd $TESTTMP/hidden-state1
+ $ cat >> .hg/hgrc <<EOF
+ > [experimental]
+ > rebaseskipobsolete=0
+ > EOF
+
+ $ hg debugdrawdag <<'EOS'
+ > C
+ > |
+ > D B # prune: B, C
+ > |/ # B/D=B
+ > A
+ > EOS
+
+ $ eval `hg tags -T '{tag}={node}\n'`
+ $ rm .hg/localtags
+
+ $ hg update -q $C --hidden
+ $ hg rebase -s $B -d $D
+ rebasing 1:2ec65233581b "B"
+ merging D
+ warning: conflicts while merging D! (edit, then use 'hg resolve --mark')
+ unresolved conflicts (see hg resolve, then hg rebase --continue)
+ [1]
+
+ $ cp -R . $TESTTMP/hidden-state2
+
+ $ hg log -G
+ @ 2:b18e25de2cf5 D
+ |
+ | @ 1:2ec65233581b B
+ |/
+ o 0:426bada5c675 A
+
+ $ hg summary
+ parent: 2:b18e25de2cf5 tip
+ D
+ parent: 1:2ec65233581b (obsolete)
+ B
+ branch: default
+ commit: 2 modified, 1 unknown, 1 unresolved (merge)
+ update: (current)
+ phases: 3 draft
+ rebase: 0 rebased, 2 remaining (rebase --continue)
+
+ $ hg rebase --abort
+ rebase aborted
+
+Also test --continue for the above case
+
+ $ cd $TESTTMP/hidden-state2
+ $ hg resolve -m
+ (no more unresolved files)
+ continue: hg rebase --continue
+ $ hg rebase --continue
+ rebasing 1:2ec65233581b "B"
+ rebasing 3:7829726be4dc "C" (tip)
+ $ hg log -G
+ @ 5:1964d5d5b547 C
+ |
+ o 4:68deb90c12a2 B
+ |
+ o 2:b18e25de2cf5 D
+ |
+ o 0:426bada5c675 A
+
--- a/tests/test-rebase-partial.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-rebase-partial.t Sat Sep 30 07:52:48 2017 -0700
@@ -7,7 +7,7 @@
> drawdag=$TESTDIR/drawdag.py
>
> [experimental]
- > evolution=createmarkers,allowunstable
+ > stabilization=createmarkers,allowunstable
>
> [alias]
> tglog = log -G --template "{rev}: {desc}"
@@ -81,7 +81,6 @@
> A
> EOF
already rebased 1:112478962961 "B" (B)
- not rebasing ignored 2:26805aba1e60 "C" (C)
rebasing 3:f585351a92f8 "D" (D tip)
o 4: D
|
--- a/tests/test-rebase-scenario-global.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-rebase-scenario-global.t Sat Sep 30 07:52:48 2017 -0700
@@ -264,14 +264,14 @@
F onto G - rebase onto a descendant:
$ hg rebase -s 5 -d 6
- abort: source is ancestor of destination
+ abort: source and destination form a cycle
[255]
G onto B - merge revision with both parents not in ancestors of target:
$ hg rebase -s 6 -d 1
rebasing 6:eea13746799a "G"
- abort: cannot use revision 6 as base, result would have 3 parents
+ abort: cannot rebase 6:eea13746799a without moving at least one of its parents
[255]
$ hg rebase --abort
rebase aborted
@@ -966,7 +966,7 @@
> [extensions]
> wraprebase=$TESTTMP/wraprebase.py
> [experimental]
- > evolution=all
+ > stabilization=all
> EOF
$ hg debugdrawdag <<'EOS'
--- a/tests/test-record.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-record.t Sat Sep 30 07:52:48 2017 -0700
@@ -62,6 +62,7 @@
-w --ignore-all-space ignore white space when comparing lines
-b --ignore-space-change ignore changes in the amount of white space
-B --ignore-blank-lines ignore changes whose lines are all blank
+ -Z --ignore-space-at-eol ignore changes in whitespace at EOL
(some details hidden, use --verbose to show complete help)
--- a/tests/test-releasenotes-formatting.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-releasenotes-formatting.t Sat Sep 30 07:52:48 2017 -0700
@@ -1,3 +1,5 @@
+#require fuzzywuzzy
+
$ cat >> $HGRCPATH << EOF
> [extensions]
> releasenotes=
@@ -376,3 +378,59 @@
* Adds a new feature.
+ $ cd ..
+
+Testing output for the --check (-c) flag
+
+ $ hg init check-flag
+ $ cd check-flag
+
+ $ touch a
+ $ hg -q commit -A -l - << EOF
+ > .. asf::
+ >
+ > First paragraph under this admonition.
+ > EOF
+
+Suggest a similar admonition in place of the invalid one.
+
+ $ hg releasenotes -r . -c
+ Invalid admonition 'asf' present in changeset 4026fe9e1c20
+
+ $ touch b
+ $ hg -q commit -A -l - << EOF
+ > .. fixes::
+ >
+ > First paragraph under this admonition.
+ > EOF
+
+ $ hg releasenotes -r . -c
+ Invalid admonition 'fixes' present in changeset 0e7130d2705c
+ (did you mean fix?)
+
+ $ cd ..
+
+Usage of --list flag
+
+ $ hg init relnotes-list
+ $ cd relnotes-list
+ $ hg releasenotes -l
+ feature: New Features
+ bc: Backwards Compatibility Changes
+ fix: Bug Fixes
+ perf: Performance Improvements
+ api: API Changes
+
+ $ cd ..
+
+Raise error on simultaneous usage of flags
+
+ $ hg init relnotes-raise-error
+ $ cd relnotes-raise-error
+ $ hg releasenotes -r . -l
+ abort: cannot use both '--list' and '--rev'
+ [255]
+
+ $ hg releasenotes -l -c
+ abort: cannot use both '--list' and '--check'
+ [255]
--- a/tests/test-releasenotes-merging.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-releasenotes-merging.t Sat Sep 30 07:52:48 2017 -0700
@@ -1,3 +1,5 @@
+#require fuzzywuzzy
+
$ cat >> $HGRCPATH << EOF
> [extensions]
> releasenotes=
@@ -158,3 +160,122 @@
* this is fix3.
+ $ cd ..
+
+Ignore commit messages containing issueNNNN when that issue number already appears in the notes.
+
+ $ hg init simple-fuzzrepo
+ $ cd simple-fuzzrepo
+ $ touch fix1
+ $ hg -q commit -A -l - << EOF
+ > commit 1
+ >
+ > .. fix::
+ >
+ > Resolved issue4567.
+ > EOF
+
+ $ cat >> $TESTTMP/issue-number-notes << EOF
+ > Bug Fixes
+ > =========
+ >
+ > * Fixed issue1234 related to XYZ.
+ >
+ > * Fixed issue4567 related to ABC.
+ >
+ > * Fixed issue3986 related to PQR.
+ > EOF
+
+ $ hg releasenotes -r . $TESTTMP/issue-number-notes
+ "issue4567" already exists in notes; ignoring
+
+ $ cat $TESTTMP/issue-number-notes
+ Bug Fixes
+ =========
+
+ * Fixed issue1234 related to XYZ.
+
+ * Fixed issue4567 related to ABC.
+
+ * Fixed issue3986 related to PQR.
+
+ $ cd ..
+
+Add short commit messages (fewer than 10 words) without fuzzy comparison,
+unless there is an exact match.
+
+ $ hg init tempdir
+ $ cd tempdir
+ $ touch feature1
+ $ hg -q commit -A -l - << EOF
+ > commit 1
+ >
+ > .. feature::
+ >
+ > Adds a new feature 1.
+ > EOF
+
+ $ hg releasenotes -r . $TESTTMP/short-sentence-notes
+
+ $ touch feature2
+ $ hg -q commit -A -l - << EOF
+ > commit 2
+ >
+ > .. feature::
+ >
+ > Adds a new feature 2.
+ > EOF
+
+ $ hg releasenotes -r . $TESTTMP/short-sentence-notes
+ $ cat $TESTTMP/short-sentence-notes
+ New Features
+ ============
+
+ * Adds a new feature 1.
+
+ * Adds a new feature 2.
+
+ $ cd ..
+
+Ignore commit messages that fuzzily match existing notes.
+
+ $ hg init fuzznotes
+ $ cd fuzznotes
+ $ touch fix1
+ $ hg -q commit -A -l - << EOF
+ > commit 1
+ >
+ > .. fix::
+ >
+ > This is a fix with another line.
+ > And it is a big one.
+ > EOF
+
+ $ cat >> $TESTTMP/fuzz-ignore-notes << EOF
+ > Bug Fixes
+ > =========
+ >
+ > * Fixed issue4567 by improving X.
+ >
+ > * This is the first line. This is next line with one newline.
+ >
+ > This is another line written after two newlines. This is going to be a big one.
+ >
+ > * This fixes another problem.
+ > EOF
+
+ $ hg releasenotes -r . $TESTTMP/fuzz-ignore-notes
+ "This is a fix with another line. And it is a big one." already exists in notes file; ignoring
+
+ $ cat $TESTTMP/fuzz-ignore-notes
+ Bug Fixes
+ =========
+
+ * Fixed issue4567 by improving X.
+
+ * This is the first line. This is next line with one newline.
+
+ This is another line written after two newlines. This is going to be a big
+ one.
+
+ * This fixes another problem.
--- a/tests/test-releasenotes-parsing.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-releasenotes-parsing.t Sat Sep 30 07:52:48 2017 -0700
@@ -1,3 +1,5 @@
+#require fuzzywuzzy
+
$ cat >> $HGRCPATH << EOF
> [extensions]
> releasenotes=
--- a/tests/test-relink.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-relink.t Sat Sep 30 07:52:48 2017 -0700
@@ -8,13 +8,15 @@
> }
$ cat > arelinked.py <<EOF
- > import sys, os
+ > from __future__ import absolute_import, print_function
+ > import os
+ > import sys
> from mercurial import util
> path1, path2 = sys.argv[1:3]
> if util.samefile(path1, path2):
- > print '%s == %s' % (path1, path2)
+ > print('%s == %s' % (path1, path2))
> else:
- > print '%s != %s' % (path1, path2)
+ > print('%s != %s' % (path1, path2))
> EOF
--- a/tests/test-rename.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-rename.t Sat Sep 30 07:52:48 2017 -0700
@@ -12,7 +12,7 @@
$ hg rename d1/d11/a1 d2/c
$ hg --config ui.portablefilenames=abort rename d1/a d1/con.xml
- abort: filename contains 'con', which is reserved on Windows: 'd1/con.xml'
+ abort: filename contains 'con', which is reserved on Windows: d1/con.xml
[255]
$ hg sum
parent: 0:9b4b6e7b2c26 tip
--- a/tests/test-repair-strip.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-repair-strip.t Sat Sep 30 07:52:48 2017 -0700
@@ -4,7 +4,7 @@
> import sys
> for entry in sys.stdin.read().split('\n'):
> if entry:
- > print entry.split('\x00')[0]
+ > print(entry.split('\x00')[0])
> EOF
$ echo "[extensions]" >> $HGRCPATH
--- a/tests/test-requires.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-requires.t Sat Sep 30 07:52:48 2017 -0700
@@ -32,7 +32,8 @@
$ echo 'featuresetup-test' >> supported/.hg/requires
$ cat > $TESTTMP/supported-locally/supportlocally.py <<EOF
- > from mercurial import localrepo, extensions
+ > from __future__ import absolute_import
+ > from mercurial import extensions, localrepo
> def featuresetup(ui, supported):
> for name, module in extensions.extensions(ui):
> if __name__ == module.__name__:
--- a/tests/test-resolve.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-resolve.t Sat Sep 30 07:52:48 2017 -0700
@@ -255,8 +255,8 @@
warning: conflicts while merging file2! (edit, then use 'hg resolve --mark')
[1]
$ ls .hg/origbackups
- file1.orig
- file2.orig
+ file1
+ file2
$ grep '<<<' file1 > /dev/null
$ grep '<<<' file2 > /dev/null
--- a/tests/test-revert.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-revert.t Sat Sep 30 07:52:48 2017 -0700
@@ -92,7 +92,7 @@
$ echo z > e
$ hg revert --all -v --config 'ui.origbackuppath=.hg/origbackups'
creating directory: $TESTTMP/repo/.hg/origbackups (glob)
- saving current version of e as $TESTTMP/repo/.hg/origbackups/e.orig (glob)
+ saving current version of e as $TESTTMP/repo/.hg/origbackups/e (glob)
reverting e
$ rm -rf .hg/origbackups
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-revlog-mmapindex.t Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,54 @@
+create verbosemmap.py
+ $ cat << EOF > verbosemmap.py
+ > # extension to make util.mmapread verbose
+ >
+ > from __future__ import absolute_import, print_function
+ >
+ > from mercurial import (
+ > extensions,
+ > util,
+ > )
+ >
+ > def mmapread(orig, fp):
+ > print "mmapping %s" % fp.name
+ > return orig(fp)
+ >
+ > def extsetup(ui):
+ > extensions.wrapfunction(util, 'mmapread', mmapread)
+ > EOF
+
+setting up base repo
+ $ hg init a
+ $ cd a
+ $ touch a
+ $ hg add a
+ $ hg commit -qm base
+ $ for i in `$TESTDIR/seq.py 1 100` ; do
+ > echo $i > a
+ > hg commit -qm $i
+ > done
+
+set up verbosemmap extension
+ $ cat << EOF >> $HGRCPATH
+ > [extensions]
+ > verbosemmap=$TESTTMP/verbosemmap.py
+ > EOF
+
+mmap index which is now more than 4k long
+ $ hg log -l 5 -T '{rev}\n' --config experimental.mmapindexthreshold=4k
+ mmapping $TESTTMP/a/.hg/store/00changelog.i (glob)
+ 100
+ 99
+ 98
+ 97
+ 96
+
+do not mmap index which is still less than 32k
+ $ hg log -l 5 -T '{rev}\n' --config experimental.mmapindexthreshold=32k
+ 100
+ 99
+ 98
+ 97
+ 96
+
+ $ cd ..
--- a/tests/test-revlog-raw.py Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-revlog-raw.py Sat Sep 30 07:52:48 2017 -0700
@@ -119,11 +119,27 @@
'deltabase': rlog.node(deltaparent),
'delta': rlog.revdiff(deltaparent, r)}
+ def deltaiter(self):
+ chain = None
+ for chunkdata in iter(lambda: self.deltachunk(chain), {}):
+ node = chunkdata['node']
+ p1 = chunkdata['p1']
+ p2 = chunkdata['p2']
+ cs = chunkdata['cs']
+ deltabase = chunkdata['deltabase']
+ delta = chunkdata['delta']
+ flags = chunkdata['flags']
+
+ chain = node
+
+ yield (node, p1, p2, cs, deltabase, delta, flags)
+
def linkmap(lnode):
return rlog.rev(lnode)
dlog = newrevlog(destname, recreate=True)
- dlog.addgroup(dummychangegroup(), linkmap, tr)
+ dummydeltas = dummychangegroup().deltaiter()
+ dlog.addgroup(dummydeltas, linkmap, tr)
return dlog
def lowlevelcopy(rlog, tr, destname=b'_destrevlog.i'):
--- a/tests/test-revset-dirstate-parents.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-revset-dirstate-parents.t Sat Sep 30 07:52:48 2017 -0700
@@ -14,19 +14,19 @@
$ try 'p1()'
(func
- ('symbol', 'p1')
+ (symbol 'p1')
None)
* set:
<baseset []>
$ try 'p2()'
(func
- ('symbol', 'p2')
+ (symbol 'p2')
None)
* set:
<baseset []>
$ try 'parents()'
(func
- ('symbol', 'parents')
+ (symbol 'parents')
None)
* set:
<baseset+ []>
--- a/tests/test-revset.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-revset.t Sat Sep 30 07:52:48 2017 -0700
@@ -155,8 +155,8 @@
$ try 0:1
(range
- ('symbol', '0')
- ('symbol', '1'))
+ (symbol '0')
+ (symbol '1'))
* set:
<spanset+ 0:2>
0
@@ -166,8 +166,7 @@
None)
* optimized:
(rangeall
- None
- define)
+ None)
* set:
<spanset+ 0:10>
0
@@ -182,8 +181,8 @@
9
$ try 3::6
(dagrange
- ('symbol', '3')
- ('symbol', '6'))
+ (symbol '3')
+ (symbol '6'))
* set:
<baseset+ [3, 5, 6]>
3
@@ -192,9 +191,9 @@
$ try '0|1|2'
(or
(list
- ('symbol', '0')
- ('symbol', '1')
- ('symbol', '2')))
+ (symbol '0')
+ (symbol '1')
+ (symbol '2')))
* set:
<baseset [0, 1, 2]>
0
@@ -204,14 +203,14 @@
names that should work without quoting
$ try a
- ('symbol', 'a')
+ (symbol 'a')
* set:
<baseset [0]>
0
$ try b-a
(minus
- ('symbol', 'b')
- ('symbol', 'a'))
+ (symbol 'b')
+ (symbol 'a'))
* set:
<filteredset
<baseset [1]>,
@@ -219,14 +218,14 @@
<baseset [0]>>>
1
$ try _a_b_c_
- ('symbol', '_a_b_c_')
+ (symbol '_a_b_c_')
* set:
<baseset [6]>
6
$ try _a_b_c_-a
(minus
- ('symbol', '_a_b_c_')
- ('symbol', 'a'))
+ (symbol '_a_b_c_')
+ (symbol 'a'))
* set:
<filteredset
<baseset [6]>,
@@ -234,14 +233,14 @@
<baseset [0]>>>
6
$ try .a.b.c.
- ('symbol', '.a.b.c.')
+ (symbol '.a.b.c.')
* set:
<baseset [7]>
7
$ try .a.b.c.-a
(minus
- ('symbol', '.a.b.c.')
- ('symbol', 'a'))
+ (symbol '.a.b.c.')
+ (symbol 'a'))
* set:
<filteredset
<baseset [7]>,
@@ -252,20 +251,20 @@
names that should be caught by fallback mechanism
$ try -- '-a-b-c-'
- ('symbol', '-a-b-c-')
+ (symbol '-a-b-c-')
* set:
<baseset [4]>
4
$ log -a-b-c-
4
$ try '+a+b+c+'
- ('symbol', '+a+b+c+')
+ (symbol '+a+b+c+')
* set:
<baseset [3]>
3
$ try '+a+b+c+:'
(rangepost
- ('symbol', '+a+b+c+'))
+ (symbol '+a+b+c+'))
* set:
<spanset+ 3:10>
3
@@ -277,7 +276,7 @@
9
$ try ':+a+b+c+'
(rangepre
- ('symbol', '+a+b+c+'))
+ (symbol '+a+b+c+'))
* set:
<spanset+ 0:4>
0
@@ -286,8 +285,8 @@
3
$ try -- '-a-b-c-:+a+b+c+'
(range
- ('symbol', '-a-b-c-')
- ('symbol', '+a+b+c+'))
+ (symbol '-a-b-c-')
+ (symbol '+a+b+c+'))
* set:
<spanset- 3:5>
4
@@ -301,15 +300,15 @@
(minus
(minus
(negate
- ('symbol', 'a'))
- ('symbol', 'b'))
- ('symbol', 'c'))
+ (symbol 'a'))
+ (symbol 'b'))
+ (symbol 'c'))
(negate
- ('symbol', 'a')))
+ (symbol 'a')))
abort: unknown revision '-a'!
[255]
$ try é
- ('symbol', '\xc3\xa9')
+ (symbol '\xc3\xa9')
* set:
<baseset [9]>
9
@@ -325,8 +324,8 @@
$ try '"-a-b-c-"-a'
(minus
- ('string', '-a-b-c-')
- ('symbol', 'a'))
+ (string '-a-b-c-')
+ (symbol 'a'))
* set:
<filteredset
<baseset [4]>,
@@ -346,9 +345,9 @@
(or
(list
(and
- ('symbol', '1')
- ('symbol', '2'))
- ('symbol', '3')))
+ (symbol '1')
+ (symbol '2'))
+ (symbol '3')))
* set:
<addset
<baseset []>,
@@ -357,10 +356,10 @@
$ try '1|2&3'
(or
(list
- ('symbol', '1')
+ (symbol '1')
(and
- ('symbol', '2')
- ('symbol', '3'))))
+ (symbol '2')
+ (symbol '3'))))
* set:
<addset
<baseset [1]>,
@@ -369,20 +368,20 @@
$ try '1&2&3' # associativity
(and
(and
- ('symbol', '1')
- ('symbol', '2'))
- ('symbol', '3'))
+ (symbol '1')
+ (symbol '2'))
+ (symbol '3'))
* set:
<baseset []>
$ try '1|(2|3)'
(or
(list
- ('symbol', '1')
+ (symbol '1')
(group
(or
(list
- ('symbol', '2')
- ('symbol', '3'))))))
+ (symbol '2')
+ (symbol '3'))))))
* set:
<addset
<baseset [1]>,
@@ -472,11 +471,11 @@
$ try 'foo=bar|baz'
(keyvalue
- ('symbol', 'foo')
+ (symbol 'foo')
(or
(list
- ('symbol', 'bar')
- ('symbol', 'baz'))))
+ (symbol 'bar')
+ (symbol 'baz'))))
hg: parse error: can't use a key-value pair in this context
[255]
@@ -484,19 +483,18 @@
$ try --optimize 'foo=(not public())'
(keyvalue
- ('symbol', 'foo')
+ (symbol 'foo')
(group
(not
(func
- ('symbol', 'public')
+ (symbol 'public')
None))))
* optimized:
(keyvalue
- ('symbol', 'foo')
+ (symbol 'foo')
(func
- ('symbol', '_notpublic')
- None
- any))
+ (symbol '_notpublic')
+ None))
hg: parse error: can't use a key-value pair in this context
[255]
@@ -505,13 +503,13 @@
$ hg debugrevspec -p parsed 'tip:tip^#generations[-1]'
* parsed:
(range
- ('symbol', 'tip')
+ (symbol 'tip')
(relsubscript
(parentpost
- ('symbol', 'tip'))
- ('symbol', 'generations')
+ (symbol 'tip'))
+ (symbol 'generations')
(negate
- ('symbol', '1'))))
+ (symbol '1'))))
9
8
7
@@ -524,10 +522,10 @@
(not
(relsubscript
(func
- ('symbol', 'public')
+ (symbol 'public')
None)
- ('symbol', 'generations')
- ('symbol', '0')))
+ (symbol 'generations')
+ (symbol '0')))
left-hand side of relation-subscript operator should be optimized recursively:
@@ -537,41 +535,34 @@
(relsubscript
(not
(func
- ('symbol', 'public')
- None
- any)
- define)
- ('symbol', 'generations')
- ('symbol', '0')
- define)
+ (symbol 'public')
+ None))
+ (symbol 'generations')
+ (symbol '0'))
* optimized:
(relsubscript
(func
- ('symbol', '_notpublic')
- None
- any)
- ('symbol', 'generations')
- ('symbol', '0')
- define)
+ (symbol '_notpublic')
+ None)
+ (symbol 'generations')
+ (symbol '0'))
resolution of subscript and relation-subscript ternary operators:
$ hg debugrevspec -p analyzed 'tip[0]'
* analyzed:
(subscript
- ('symbol', 'tip')
- ('symbol', '0')
- define)
+ (symbol 'tip')
+ (symbol '0'))
hg: parse error: can't use a subscript in this context
[255]
$ hg debugrevspec -p analyzed 'tip#rel[0]'
* analyzed:
(relsubscript
- ('symbol', 'tip')
- ('symbol', 'rel')
- ('symbol', '0')
- define)
+ (symbol 'tip')
+ (symbol 'rel')
+ (symbol '0'))
hg: parse error: unknown identifier: rel
[255]
@@ -579,11 +570,9 @@
* analyzed:
(subscript
(relation
- ('symbol', 'tip')
- ('symbol', 'rel')
- define)
- ('symbol', '0')
- define)
+ (symbol 'tip')
+ (symbol 'rel'))
+ (symbol '0'))
hg: parse error: can't use a subscript in this context
[255]
@@ -591,12 +580,10 @@
* analyzed:
(subscript
(relsubscript
- ('symbol', 'tip')
- ('symbol', 'rel')
- ('symbol', '0')
- define)
- ('symbol', '1')
- define)
+ (symbol 'tip')
+ (symbol 'rel')
+ (symbol '0'))
+ (symbol '1'))
hg: parse error: can't use a subscript in this context
[255]
@@ -604,12 +591,10 @@
* analyzed:
(relsubscript
(relation
- ('symbol', 'tip')
- ('symbol', 'rel0')
- define)
- ('symbol', 'rel1')
- ('symbol', '1')
- define)
+ (symbol 'tip')
+ (symbol 'rel0'))
+ (symbol 'rel1')
+ (symbol '1'))
hg: parse error: unknown identifier: rel1
[255]
@@ -617,13 +602,11 @@
* analyzed:
(relsubscript
(relsubscript
- ('symbol', 'tip')
- ('symbol', 'rel0')
- ('symbol', '0')
- define)
- ('symbol', 'rel1')
- ('symbol', '1')
- define)
+ (symbol 'tip')
+ (symbol 'rel0')
+ (symbol '0'))
+ (symbol 'rel1')
+ (symbol '1'))
hg: parse error: unknown identifier: rel1
[255]
@@ -692,28 +675,23 @@
(group
(or
(list
- ('symbol', '0')
- ('symbol', '1'))))
- ('symbol', '1'))
+ (symbol '0')
+ (symbol '1'))))
+ (symbol '1'))
* analyzed:
(and
(or
(list
- ('symbol', '0')
- ('symbol', '1'))
- define)
+ (symbol '0')
+ (symbol '1')))
(not
- ('symbol', '1')
- follow)
- define)
+ (symbol '1')))
* optimized:
(difference
(func
- ('symbol', '_list')
- ('string', '0\x001')
- define)
- ('symbol', '1')
- define)
+ (symbol '_list')
+ (string '0\x001'))
+ (symbol '1'))
0
$ hg debugrevspec -p unknown '0'
@@ -732,19 +710,15 @@
* analyzed:
(and
(func
- ('symbol', 'r3232')
- None
- define)
- ('symbol', '2')
- define)
+ (symbol 'r3232')
+ None)
+ (symbol '2'))
* optimized:
- (and
- ('symbol', '2')
+ (andsmally
(func
- ('symbol', 'r3232')
- None
- define)
- define)
+ (symbol 'r3232')
+ None)
+ (symbol '2'))
* analyzed set:
<baseset [2]>
* optimized set:
@@ -776,8 +750,7 @@
None)
* analyzed:
(rangeall
- None
- define)
+ None)
* set:
<spanset+ 0:10>
0
@@ -793,8 +766,7 @@
$ try -p analyzed ':1'
* analyzed:
(rangepre
- ('symbol', '1')
- define)
+ (symbol '1'))
* set:
<spanset+ 0:2>
0
@@ -804,10 +776,8 @@
(rangepre
(or
(list
- ('symbol', '1')
- ('symbol', '2'))
- define)
- define)
+ (symbol '1')
+ (symbol '2'))))
* set:
<spanset+ 0:3>
0
@@ -817,10 +787,8 @@
* analyzed:
(rangepre
(and
- ('symbol', '1')
- ('symbol', '2')
- define)
- define)
+ (symbol '1')
+ (symbol '2')))
* set:
<baseset []>
@@ -831,8 +799,8 @@
$ try '1^:2'
(range
(parentpost
- ('symbol', '1'))
- ('symbol', '2'))
+ (symbol '1'))
+ (symbol '2'))
* set:
<spanset+ 0:3>
0
@@ -842,8 +810,8 @@
$ try '1^::2'
(dagrange
(parentpost
- ('symbol', '1'))
- ('symbol', '2'))
+ (symbol '1'))
+ (symbol '2'))
* set:
<baseset+ [0, 1, 2]>
0
@@ -853,7 +821,7 @@
$ try '9^:'
(rangepost
(parentpost
- ('symbol', '9')))
+ (symbol '9')))
* set:
<spanset+ 8:10>
8
@@ -863,10 +831,10 @@
$ try '1^(:2)'
(parent
- ('symbol', '1')
+ (symbol '1')
(group
(rangepre
- ('symbol', '2'))))
+ (symbol '2'))))
hg: parse error: ^ expects a number 0, 1, or 2
[255]
@@ -874,11 +842,11 @@
$ try 'sort(1^:2)'
(func
- ('symbol', 'sort')
+ (symbol 'sort')
(range
(parentpost
- ('symbol', '1'))
- ('symbol', '2')))
+ (symbol '1'))
+ (symbol '2')))
* set:
<spanset+ 0:3>
0
@@ -891,9 +859,9 @@
(group
(range
(parentpost
- ('symbol', '3'))
- ('symbol', '4'))))
- ('symbol', '2'))
+ (symbol '3'))
+ (symbol '4'))))
+ (symbol '2'))
* set:
<spanset+ 0:3>
0
@@ -906,9 +874,9 @@
(group
(dagrange
(parentpost
- ('symbol', '3'))
- ('symbol', '4'))))
- ('symbol', '2'))
+ (symbol '3'))
+ (symbol '4'))))
+ (symbol '2'))
* set:
<baseset+ [0, 1, 2]>
0
@@ -921,7 +889,7 @@
(group
(rangepost
(parentpost
- ('symbol', '9'))))))
+ (symbol '9'))))))
* set:
<spanset+ 4:10>
4
@@ -934,12 +902,12 @@
x^ in alias should also be resolved
$ try 'A' --config 'revsetalias.A=1^:2'
- ('symbol', 'A')
+ (symbol 'A')
* expanded:
(range
(parentpost
- ('symbol', '1'))
- ('symbol', '2'))
+ (symbol '1'))
+ (symbol '2'))
* set:
<spanset+ 0:3>
0
@@ -948,13 +916,13 @@
$ try 'A:2' --config 'revsetalias.A=1^'
(range
- ('symbol', 'A')
- ('symbol', '2'))
+ (symbol 'A')
+ (symbol '2'))
* expanded:
(range
(parentpost
- ('symbol', '1'))
- ('symbol', '2'))
+ (symbol '1'))
+ (symbol '2'))
* set:
<spanset+ 0:3>
0
@@ -966,13 +934,13 @@
$ try '1^A' --config 'revsetalias.A=:2'
(parent
- ('symbol', '1')
- ('symbol', 'A'))
+ (symbol '1')
+ (symbol 'A'))
* expanded:
(parent
- ('symbol', '1')
+ (symbol '1')
(rangepre
- ('symbol', '2')))
+ (symbol '2')))
hg: parse error: ^ expects a number 0, 1, or 2
[255]
@@ -1200,11 +1168,11 @@
* parsed:
(relsubscript
(func
- ('symbol', 'roots')
+ (symbol 'roots')
(rangeall
None))
- ('symbol', 'g')
- ('symbol', '2'))
+ (symbol 'g')
+ (symbol '2'))
2
3
@@ -1302,22 +1270,22 @@
6
$ try 'grep("(")' # invalid regular expression
(func
- ('symbol', 'grep')
- ('string', '('))
+ (symbol 'grep')
+ (string '('))
hg: parse error: invalid match pattern: unbalanced parenthesis
[255]
$ try 'grep("\bissue\d+")'
(func
- ('symbol', 'grep')
- ('string', '\x08issue\\d+'))
+ (symbol 'grep')
+ (string '\x08issue\\d+'))
* set:
<filteredset
<fullreposet+ 0:10>,
<grep '\x08issue\\d+'>>
$ try 'grep(r"\bissue\d+")'
(func
- ('symbol', 'grep')
- ('string', '\\bissue\\d+'))
+ (symbol 'grep')
+ (string '\\bissue\\d+'))
* set:
<filteredset
<fullreposet+ 0:10>,
@@ -1634,20 +1602,17 @@
(onlypost
(minus
(range
- ('symbol', '8')
- ('symbol', '9'))
- ('symbol', '8')))
+ (symbol '8')
+ (symbol '9'))
+ (symbol '8')))
* optimized:
(func
- ('symbol', 'only')
+ (symbol 'only')
(difference
(range
- ('symbol', '8')
- ('symbol', '9')
- define)
- ('symbol', '8')
- define)
- define)
+ (symbol '8')
+ (symbol '9'))
+ (symbol '8')))
* set:
<baseset+ [8, 9]>
8
@@ -1655,16 +1620,15 @@
$ try --optimize '(9)%(5)'
(only
(group
- ('symbol', '9'))
+ (symbol '9'))
(group
- ('symbol', '5')))
+ (symbol '5')))
* optimized:
(func
- ('symbol', 'only')
+ (symbol 'only')
(list
- ('symbol', '9')
- ('symbol', '5'))
- define)
+ (symbol '9')
+ (symbol '5')))
* set:
<baseset+ [2, 4, 8, 9]>
2
@@ -1830,7 +1794,7 @@
$ cd wdir-hashcollision
$ cat <<EOF >> .hg/hgrc
> [experimental]
- > evolution = createmarkers
+ > stabilization = createmarkers
> EOF
$ echo 0 > a
$ hg ci -qAm 0
@@ -1998,19 +1962,14 @@
(difference
(and
(range
- ('symbol', '3')
- ('symbol', '0')
- define)
+ (symbol '3')
+ (symbol '0'))
(range
- ('symbol', '0')
- ('symbol', '3')
- follow)
- define)
+ (symbol '0')
+ (symbol '3')))
(range
- ('symbol', '2')
- ('symbol', '1')
- any)
- define)
+ (symbol '2')
+ (symbol '1')))
* set:
<filteredset
<filteredset
@@ -2027,25 +1986,22 @@
$ try --optimize '2:0 & (0 + 1 + 2)'
(and
(range
- ('symbol', '2')
- ('symbol', '0'))
+ (symbol '2')
+ (symbol '0'))
(group
(or
(list
- ('symbol', '0')
- ('symbol', '1')
- ('symbol', '2')))))
+ (symbol '0')
+ (symbol '1')
+ (symbol '2')))))
* optimized:
(and
(range
- ('symbol', '2')
- ('symbol', '0')
- define)
+ (symbol '2')
+ (symbol '0'))
(func
- ('symbol', '_list')
- ('string', '0\x001\x002')
- follow)
- define)
+ (symbol '_list')
+ (string '0\x001\x002')))
* set:
<filteredset
<spanset- 0:3>,
@@ -2059,36 +2015,32 @@
$ try --optimize '2:0 & (0:1 + 2)'
(and
(range
- ('symbol', '2')
- ('symbol', '0'))
+ (symbol '2')
+ (symbol '0'))
(group
(or
(list
(range
- ('symbol', '0')
- ('symbol', '1'))
- ('symbol', '2')))))
+ (symbol '0')
+ (symbol '1'))
+ (symbol '2')))))
* optimized:
(and
(range
- ('symbol', '2')
- ('symbol', '0')
- define)
+ (symbol '2')
+ (symbol '0'))
(or
(list
- ('symbol', '2')
(range
- ('symbol', '0')
- ('symbol', '1')
- follow))
- follow)
- define)
+ (symbol '0')
+ (symbol '1'))
+ (symbol '2'))))
* set:
<filteredset
<spanset- 0:3>,
<addset
- <baseset [2]>,
- <spanset+ 0:2>>>
+ <spanset+ 0:2>,
+ <baseset [2]>>>
2
1
0
@@ -2098,22 +2050,19 @@
$ trylist --optimize '2:0 & %ld' 0 1 2
(and
(range
- ('symbol', '2')
- ('symbol', '0'))
- (func
- ('symbol', '_intlist')
- ('string', '0\x001\x002')))
- * optimized:
- (and
+ (symbol '2')
+ (symbol '0'))
(func
- ('symbol', '_intlist')
- ('string', '0\x001\x002')
- follow)
+ (symbol '_intlist')
+ (string '0\x001\x002')))
+ * optimized:
+ (andsmally
(range
- ('symbol', '2')
- ('symbol', '0')
- define)
- define)
+ (symbol '2')
+ (symbol '0'))
+ (func
+ (symbol '_intlist')
+ (string '0\x001\x002')))
* set:
<filteredset
<spanset- 0:3>,
@@ -2125,22 +2074,19 @@
$ trylist --optimize '%ld & 2:0' 0 2 1
(and
(func
- ('symbol', '_intlist')
- ('string', '0\x002\x001'))
+ (symbol '_intlist')
+ (string '0\x002\x001'))
(range
- ('symbol', '2')
- ('symbol', '0')))
+ (symbol '2')
+ (symbol '0')))
* optimized:
(and
(func
- ('symbol', '_intlist')
- ('string', '0\x002\x001')
- define)
+ (symbol '_intlist')
+ (string '0\x002\x001'))
(range
- ('symbol', '2')
- ('symbol', '0')
- follow)
- define)
+ (symbol '2')
+ (symbol '0')))
* set:
<filteredset
<baseset [0, 2, 1]>,
@@ -2154,22 +2100,19 @@
$ trylist --optimize --bin '2:0 & %ln' `hg log -T '{node} ' -r0:2`
(and
(range
- ('symbol', '2')
- ('symbol', '0'))
+ (symbol '2')
+ (symbol '0'))
(func
- ('symbol', '_hexlist')
- ('string', '*'))) (glob)
+ (symbol '_hexlist')
+ (string '*'))) (glob)
* optimized:
(and
(range
- ('symbol', '2')
- ('symbol', '0')
- define)
+ (symbol '2')
+ (symbol '0'))
(func
- ('symbol', '_hexlist')
- ('string', '*') (glob)
- follow)
- define)
+ (symbol '_hexlist')
+ (string '*'))) (glob)
* set:
<filteredset
<spanset- 0:3>,
@@ -2181,22 +2124,19 @@
$ trylist --optimize --bin '%ln & 2:0' `hg log -T '{node} ' -r0+2+1`
(and
(func
- ('symbol', '_hexlist')
- ('string', '*')) (glob)
- (range
- ('symbol', '2')
- ('symbol', '0')))
- * optimized:
- (and
+ (symbol '_hexlist')
+ (string '*')) (glob)
(range
- ('symbol', '2')
- ('symbol', '0')
- follow)
+ (symbol '2')
+ (symbol '0')))
+ * optimized:
+ (andsmally
(func
- ('symbol', '_hexlist')
- ('string', '*') (glob)
- define)
- define)
+ (symbol '_hexlist')
+ (string '*')) (glob)
+ (range
+ (symbol '2')
+ (symbol '0')))
* set:
<baseset [0, 2, 1]>
0
@@ -2210,14 +2150,11 @@
* optimized:
(difference
(range
- ('symbol', '2')
- ('symbol', '0')
- define)
+ (symbol '2')
+ (symbol '0'))
(func
- ('symbol', '_list')
- ('string', '0\x001')
- any)
- define)
+ (symbol '_list')
+ (string '0\x001')))
* set:
<filteredset
<spanset- 0:3>,
@@ -2229,20 +2166,15 @@
* optimized:
(difference
(range
- ('symbol', '2')
- ('symbol', '0')
- define)
+ (symbol '2')
+ (symbol '0'))
(and
(range
- ('symbol', '0')
- ('symbol', '2')
- any)
+ (symbol '0')
+ (symbol '2'))
(func
- ('symbol', '_list')
- ('string', '0\x001')
- any)
- any)
- define)
+ (symbol '_list')
+ (string '0\x001'))))
* set:
<filteredset
<spanset- 0:3>,
@@ -2256,12 +2188,10 @@
$ try -p optimized 'present(2 + 0 + 1)'
* optimized:
(func
- ('symbol', 'present')
+ (symbol 'present')
(func
- ('symbol', '_list')
- ('string', '2\x000\x001')
- define)
- define)
+ (symbol '_list')
+ (string '2\x000\x001')))
* set:
<baseset [2, 0, 1]>
2
@@ -2271,29 +2201,25 @@
$ try --optimize '2:0 & present(0 + 1 + 2)'
(and
(range
- ('symbol', '2')
- ('symbol', '0'))
+ (symbol '2')
+ (symbol '0'))
(func
- ('symbol', 'present')
+ (symbol 'present')
(or
(list
- ('symbol', '0')
- ('symbol', '1')
- ('symbol', '2')))))
+ (symbol '0')
+ (symbol '1')
+ (symbol '2')))))
* optimized:
(and
(range
- ('symbol', '2')
- ('symbol', '0')
- define)
+ (symbol '2')
+ (symbol '0'))
(func
- ('symbol', 'present')
+ (symbol 'present')
(func
- ('symbol', '_list')
- ('string', '0\x001\x002')
- follow)
- follow)
- define)
+ (symbol '_list')
+ (string '0\x001\x002'))))
* set:
<filteredset
<spanset- 0:3>,
@@ -2307,27 +2233,23 @@
$ try --optimize '0:2 & reverse(all())'
(and
(range
- ('symbol', '0')
- ('symbol', '2'))
+ (symbol '0')
+ (symbol '2'))
(func
- ('symbol', 'reverse')
+ (symbol 'reverse')
(func
- ('symbol', 'all')
+ (symbol 'all')
None)))
* optimized:
(and
(range
- ('symbol', '0')
- ('symbol', '2')
- define)
+ (symbol '0')
+ (symbol '2'))
(func
- ('symbol', 'reverse')
+ (symbol 'reverse')
(func
- ('symbol', 'all')
- None
- define)
- follow)
- define)
+ (symbol 'all')
+ None)))
* set:
<filteredset
<spanset+ 0:3>,
@@ -2341,32 +2263,28 @@
$ try --optimize '0:2 & sort(all(), -rev)'
(and
(range
- ('symbol', '0')
- ('symbol', '2'))
+ (symbol '0')
+ (symbol '2'))
(func
- ('symbol', 'sort')
+ (symbol 'sort')
(list
(func
- ('symbol', 'all')
+ (symbol 'all')
None)
(negate
- ('symbol', 'rev')))))
+ (symbol 'rev')))))
* optimized:
(and
(range
- ('symbol', '0')
- ('symbol', '2')
- define)
+ (symbol '0')
+ (symbol '2'))
(func
- ('symbol', 'sort')
+ (symbol 'sort')
(list
(func
- ('symbol', 'all')
- None
- define)
- ('string', '-rev'))
- follow)
- define)
+ (symbol 'all')
+ None)
+ (string '-rev'))))
* set:
<filteredset
<spanset+ 0:3>,
@@ -2389,29 +2307,25 @@
$ try --optimize '2:0 & first(1 + 0 + 2)'
(and
(range
- ('symbol', '2')
- ('symbol', '0'))
+ (symbol '2')
+ (symbol '0'))
(func
- ('symbol', 'first')
+ (symbol 'first')
(or
(list
- ('symbol', '1')
- ('symbol', '0')
- ('symbol', '2')))))
+ (symbol '1')
+ (symbol '0')
+ (symbol '2')))))
* optimized:
(and
(range
- ('symbol', '2')
- ('symbol', '0')
- define)
+ (symbol '2')
+ (symbol '0'))
(func
- ('symbol', 'first')
+ (symbol 'first')
(func
- ('symbol', '_list')
- ('string', '1\x000\x002')
- define)
- follow)
- define)
+ (symbol '_list')
+ (string '1\x000\x002'))))
* set:
<filteredset
<baseset [1]>,
@@ -2421,30 +2335,26 @@
$ try --optimize '2:0 & not last(0 + 2 + 1)'
(and
(range
- ('symbol', '2')
- ('symbol', '0'))
+ (symbol '2')
+ (symbol '0'))
(not
(func
- ('symbol', 'last')
+ (symbol 'last')
(or
(list
- ('symbol', '0')
- ('symbol', '2')
- ('symbol', '1'))))))
+ (symbol '0')
+ (symbol '2')
+ (symbol '1'))))))
* optimized:
(difference
(range
- ('symbol', '2')
- ('symbol', '0')
- define)
+ (symbol '2')
+ (symbol '0'))
(func
- ('symbol', 'last')
+ (symbol 'last')
(func
- ('symbol', '_list')
- ('string', '0\x002\x001')
- define)
- any)
- define)
+ (symbol '_list')
+ (string '0\x002\x001'))))
* set:
<filteredset
<spanset- 0:3>,
@@ -2458,71 +2368,60 @@
$ try --optimize '2:0 & (1 + 0 + 2):(0 + 2 + 1)'
(and
(range
- ('symbol', '2')
- ('symbol', '0'))
+ (symbol '2')
+ (symbol '0'))
(range
(group
(or
(list
- ('symbol', '1')
- ('symbol', '0')
- ('symbol', '2'))))
+ (symbol '1')
+ (symbol '0')
+ (symbol '2'))))
(group
(or
(list
- ('symbol', '0')
- ('symbol', '2')
- ('symbol', '1'))))))
+ (symbol '0')
+ (symbol '2')
+ (symbol '1'))))))
* optimized:
(and
(range
- ('symbol', '2')
- ('symbol', '0')
- define)
+ (symbol '2')
+ (symbol '0'))
(range
(func
- ('symbol', '_list')
- ('string', '1\x000\x002')
- define)
+ (symbol '_list')
+ (string '1\x000\x002'))
(func
- ('symbol', '_list')
- ('string', '0\x002\x001')
- define)
- follow)
- define)
+ (symbol '_list')
+ (string '0\x002\x001'))))
* set:
<filteredset
<spanset- 0:3>,
<baseset [1]>>
1
- 'A & B' can be rewritten as 'B & A' by weight, but that's fine as long as
- the ordering rule is determined before the rewrite; in this example,
- 'B' follows the order of the initial set, which is the same order as 'A'
- since 'A' also follows the order:
+ 'A & B' can be rewritten as 'flipand(B, A)' by weight.
$ try --optimize 'contains("glob:*") & (2 + 0 + 1)'
(and
(func
- ('symbol', 'contains')
- ('string', 'glob:*'))
+ (symbol 'contains')
+ (string 'glob:*'))
(group
(or
(list
- ('symbol', '2')
- ('symbol', '0')
- ('symbol', '1')))))
+ (symbol '2')
+ (symbol '0')
+ (symbol '1')))))
* optimized:
- (and
+ (andsmally
(func
- ('symbol', '_list')
- ('string', '2\x000\x001')
- follow)
+ (symbol 'contains')
+ (string 'glob:*'))
(func
- ('symbol', 'contains')
- ('string', 'glob:*')
- define)
- define)
+ (symbol '_list')
+ (string '2\x000\x001')))
* set:
<filteredset
<baseset+ [0, 1, 2]>,
@@ -2537,30 +2436,26 @@
$ try --optimize 'reverse(contains("glob:*")) & (0 + 2 + 1)'
(and
(func
- ('symbol', 'reverse')
+ (symbol 'reverse')
(func
- ('symbol', 'contains')
- ('string', 'glob:*')))
+ (symbol 'contains')
+ (string 'glob:*')))
(group
(or
(list
- ('symbol', '0')
- ('symbol', '2')
- ('symbol', '1')))))
+ (symbol '0')
+ (symbol '2')
+ (symbol '1')))))
* optimized:
- (and
+ (andsmally
(func
- ('symbol', '_list')
- ('string', '0\x002\x001')
- follow)
- (func
- ('symbol', 'reverse')
+ (symbol 'reverse')
(func
- ('symbol', 'contains')
- ('string', 'glob:*')
- define)
- define)
- define)
+ (symbol 'contains')
+ (string 'glob:*')))
+ (func
+ (symbol '_list')
+ (string '0\x002\x001')))
* set:
<filteredset
<baseset- [0, 1, 2]>,
@@ -2569,69 +2464,6 @@
1
0
- 'A + B' can be rewritten to 'B + A' by weight only when the order doesn't
- matter (e.g. 'X & (A + B)' can be 'X & (B + A)', but '(A + B) & X' can't):
-
- $ try -p optimized '0:2 & (reverse(contains("a")) + 2)'
- * optimized:
- (and
- (range
- ('symbol', '0')
- ('symbol', '2')
- define)
- (or
- (list
- ('symbol', '2')
- (func
- ('symbol', 'reverse')
- (func
- ('symbol', 'contains')
- ('string', 'a')
- define)
- follow))
- follow)
- define)
- * set:
- <filteredset
- <spanset+ 0:3>,
- <addset
- <baseset [2]>,
- <filteredset
- <fullreposet+ 0:10>,
- <contains 'a'>>>>
- 0
- 1
- 2
-
- $ try -p optimized '(reverse(contains("a")) + 2) & 0:2'
- * optimized:
- (and
- (range
- ('symbol', '0')
- ('symbol', '2')
- follow)
- (or
- (list
- (func
- ('symbol', 'reverse')
- (func
- ('symbol', 'contains')
- ('string', 'a')
- define)
- define)
- ('symbol', '2'))
- define)
- define)
- * set:
- <addset
- <filteredset
- <spanset- 0:3>,
- <contains 'a'>>,
- <baseset [2]>>
- 1
- 0
- 2
-
test sort revset
--------------------------------------------
@@ -2905,1609 +2737,3 @@
$ cd ..
$ cd repo
-
-test subtracting something from an addset
-
- $ log '(outgoing() or removes(a)) - removes(a)'
- 8
- 9
-
-test intersecting something with an addset
-
- $ log 'parents(outgoing() or removes(a))'
- 1
- 4
- 5
- 8
-
-test that `or` operation combines elements in the right order:
-
- $ log '3:4 or 2:5'
- 3
- 4
- 2
- 5
- $ log '3:4 or 5:2'
- 3
- 4
- 5
- 2
- $ log 'sort(3:4 or 2:5)'
- 2
- 3
- 4
- 5
- $ log 'sort(3:4 or 5:2)'
- 2
- 3
- 4
- 5
-
-test that more than one `-r`s are combined in the right order and deduplicated:
-
- $ hg log -T '{rev}\n' -r 3 -r 3 -r 4 -r 5:2 -r 'ancestors(4)'
- 3
- 4
- 5
- 2
- 0
- 1
-
-test that `or` operation skips duplicated revisions from right-hand side
-
- $ try 'reverse(1::5) or ancestors(4)'
- (or
- (list
- (func
- ('symbol', 'reverse')
- (dagrange
- ('symbol', '1')
- ('symbol', '5')))
- (func
- ('symbol', 'ancestors')
- ('symbol', '4'))))
- * set:
- <addset
- <baseset- [1, 3, 5]>,
- <generatorset+>>
- 5
- 3
- 1
- 0
- 2
- 4
- $ try 'sort(ancestors(4) or reverse(1::5))'
- (func
- ('symbol', 'sort')
- (or
- (list
- (func
- ('symbol', 'ancestors')
- ('symbol', '4'))
- (func
- ('symbol', 'reverse')
- (dagrange
- ('symbol', '1')
- ('symbol', '5'))))))
- * set:
- <addset+
- <generatorset+>,
- <baseset- [1, 3, 5]>>
- 0
- 1
- 2
- 3
- 4
- 5
-
-test optimization of trivial `or` operation
-
- $ try --optimize '0|(1)|"2"|-2|tip|null'
- (or
- (list
- ('symbol', '0')
- (group
- ('symbol', '1'))
- ('string', '2')
- (negate
- ('symbol', '2'))
- ('symbol', 'tip')
- ('symbol', 'null')))
- * optimized:
- (func
- ('symbol', '_list')
- ('string', '0\x001\x002\x00-2\x00tip\x00null')
- define)
- * set:
- <baseset [0, 1, 2, 8, 9, -1]>
- 0
- 1
- 2
- 8
- 9
- -1
-
- $ try --optimize '0|1|2:3'
- (or
- (list
- ('symbol', '0')
- ('symbol', '1')
- (range
- ('symbol', '2')
- ('symbol', '3'))))
- * optimized:
- (or
- (list
- (func
- ('symbol', '_list')
- ('string', '0\x001')
- define)
- (range
- ('symbol', '2')
- ('symbol', '3')
- define))
- define)
- * set:
- <addset
- <baseset [0, 1]>,
- <spanset+ 2:4>>
- 0
- 1
- 2
- 3
-
- $ try --optimize '0:1|2|3:4|5|6'
- (or
- (list
- (range
- ('symbol', '0')
- ('symbol', '1'))
- ('symbol', '2')
- (range
- ('symbol', '3')
- ('symbol', '4'))
- ('symbol', '5')
- ('symbol', '6')))
- * optimized:
- (or
- (list
- (range
- ('symbol', '0')
- ('symbol', '1')
- define)
- ('symbol', '2')
- (range
- ('symbol', '3')
- ('symbol', '4')
- define)
- (func
- ('symbol', '_list')
- ('string', '5\x006')
- define))
- define)
- * set:
- <addset
- <addset
- <spanset+ 0:2>,
- <baseset [2]>>,
- <addset
- <spanset+ 3:5>,
- <baseset [5, 6]>>>
- 0
- 1
- 2
- 3
- 4
- 5
- 6
-
-unoptimized `or` looks like this
-
- $ try --no-optimized -p analyzed '0|1|2|3|4'
- * analyzed:
- (or
- (list
- ('symbol', '0')
- ('symbol', '1')
- ('symbol', '2')
- ('symbol', '3')
- ('symbol', '4'))
- define)
- * set:
- <addset
- <addset
- <baseset [0]>,
- <baseset [1]>>,
- <addset
- <baseset [2]>,
- <addset
- <baseset [3]>,
- <baseset [4]>>>>
- 0
- 1
- 2
- 3
- 4
-
-test that `_list` should be narrowed by provided `subset`
-
- $ log '0:2 and (null|1|2|3)'
- 1
- 2
-
-test that `_list` should remove duplicates
-
- $ log '0|1|2|1|2|-1|tip'
- 0
- 1
- 2
- 9
-
-test unknown revision in `_list`
-
- $ log '0|unknown'
- abort: unknown revision 'unknown'!
- [255]
-
-test integer range in `_list`
-
- $ log '-1|-10'
- 9
- 0
-
- $ log '-10|-11'
- abort: unknown revision '-11'!
- [255]
-
- $ log '9|10'
- abort: unknown revision '10'!
- [255]
-
-test '0000' != '0' in `_list`
-
- $ log '0|0000'
- 0
- -1
-
-test ',' in `_list`
- $ log '0,1'
- hg: parse error: can't use a list in this context
- (see hg help "revsets.x or y")
- [255]
- $ try '0,1,2'
- (list
- ('symbol', '0')
- ('symbol', '1')
- ('symbol', '2'))
- hg: parse error: can't use a list in this context
- (see hg help "revsets.x or y")
- [255]
-
-test that chained `or` operations make balanced addsets
-
- $ try '0:1|1:2|2:3|3:4|4:5'
- (or
- (list
- (range
- ('symbol', '0')
- ('symbol', '1'))
- (range
- ('symbol', '1')
- ('symbol', '2'))
- (range
- ('symbol', '2')
- ('symbol', '3'))
- (range
- ('symbol', '3')
- ('symbol', '4'))
- (range
- ('symbol', '4')
- ('symbol', '5'))))
- * set:
- <addset
- <addset
- <spanset+ 0:2>,
- <spanset+ 1:3>>,
- <addset
- <spanset+ 2:4>,
- <addset
- <spanset+ 3:5>,
- <spanset+ 4:6>>>>
- 0
- 1
- 2
- 3
- 4
- 5
-
-no crash by empty group "()" while optimizing `or` operations
-
- $ try --optimize '0|()'
- (or
- (list
- ('symbol', '0')
- (group
- None)))
- * optimized:
- (or
- (list
- ('symbol', '0')
- None)
- define)
- hg: parse error: missing argument
- [255]
-
-test that chained `or` operations never eat up stack (issue4624)
-(uses `0:1` instead of `0` to avoid future optimization of trivial revisions)
-
- $ hg log -T '{rev}\n' -r `$PYTHON -c "print '+'.join(['0:1'] * 500)"`
- 0
- 1
-
-test that repeated `-r` options never eat up stack (issue4565)
-(uses `-r 0::1` to avoid possible optimization at old-style parser)
-
- $ hg log -T '{rev}\n' `$PYTHON -c "for i in xrange(500): print '-r 0::1 ',"`
- 0
- 1
-
-check that conversion to only works
- $ try --optimize '::3 - ::1'
- (minus
- (dagrangepre
- ('symbol', '3'))
- (dagrangepre
- ('symbol', '1')))
- * optimized:
- (func
- ('symbol', 'only')
- (list
- ('symbol', '3')
- ('symbol', '1'))
- define)
- * set:
- <baseset+ [3]>
- 3
- $ try --optimize 'ancestors(1) - ancestors(3)'
- (minus
- (func
- ('symbol', 'ancestors')
- ('symbol', '1'))
- (func
- ('symbol', 'ancestors')
- ('symbol', '3')))
- * optimized:
- (func
- ('symbol', 'only')
- (list
- ('symbol', '1')
- ('symbol', '3'))
- define)
- * set:
- <baseset+ []>
- $ try --optimize 'not ::2 and ::6'
- (and
- (not
- (dagrangepre
- ('symbol', '2')))
- (dagrangepre
- ('symbol', '6')))
- * optimized:
- (func
- ('symbol', 'only')
- (list
- ('symbol', '6')
- ('symbol', '2'))
- define)
- * set:
- <baseset+ [3, 4, 5, 6]>
- 3
- 4
- 5
- 6
- $ try --optimize 'ancestors(6) and not ancestors(4)'
- (and
- (func
- ('symbol', 'ancestors')
- ('symbol', '6'))
- (not
- (func
- ('symbol', 'ancestors')
- ('symbol', '4'))))
- * optimized:
- (func
- ('symbol', 'only')
- (list
- ('symbol', '6')
- ('symbol', '4'))
- define)
- * set:
- <baseset+ [3, 5, 6]>
- 3
- 5
- 6
-
-no crash by empty group "()" while optimizing to "only()"
-
- $ try --optimize '::1 and ()'
- (and
- (dagrangepre
- ('symbol', '1'))
- (group
- None))
- * optimized:
- (and
- None
- (func
- ('symbol', 'ancestors')
- ('symbol', '1')
- define)
- define)
- hg: parse error: missing argument
- [255]
-
-optimization to only() works only if ancestors() takes only one argument
-
- $ hg debugrevspec -p optimized 'ancestors(6) - ancestors(4, 1)'
- * optimized:
- (difference
- (func
- ('symbol', 'ancestors')
- ('symbol', '6')
- define)
- (func
- ('symbol', 'ancestors')
- (list
- ('symbol', '4')
- ('symbol', '1'))
- any)
- define)
- 0
- 1
- 3
- 5
- 6
- $ hg debugrevspec -p optimized 'ancestors(6, 1) - ancestors(4)'
- * optimized:
- (difference
- (func
- ('symbol', 'ancestors')
- (list
- ('symbol', '6')
- ('symbol', '1'))
- define)
- (func
- ('symbol', 'ancestors')
- ('symbol', '4')
- any)
- define)
- 5
- 6
-
-optimization disabled if keyword arguments passed (because we're too lazy
-to support it)
-
- $ hg debugrevspec -p optimized 'ancestors(set=6) - ancestors(set=4)'
- * optimized:
- (difference
- (func
- ('symbol', 'ancestors')
- (keyvalue
- ('symbol', 'set')
- ('symbol', '6'))
- define)
- (func
- ('symbol', 'ancestors')
- (keyvalue
- ('symbol', 'set')
- ('symbol', '4'))
- any)
- define)
- 3
- 5
- 6
-
-invalid function call should not be optimized to only()
-
- $ log '"ancestors"(6) and not ancestors(4)'
- hg: parse error: not a symbol
- [255]
-
- $ log 'ancestors(6) and not "ancestors"(4)'
- hg: parse error: not a symbol
- [255]
-
-we can use patterns when searching for tags
-
- $ log 'tag("1..*")'
- abort: tag '1..*' does not exist!
- [255]
- $ log 'tag("re:1..*")'
- 6
- $ log 'tag("re:[0-9].[0-9]")'
- 6
- $ log 'tag("literal:1.0")'
- 6
- $ log 'tag("re:0..*")'
-
- $ log 'tag(unknown)'
- abort: tag 'unknown' does not exist!
- [255]
- $ log 'tag("re:unknown")'
- $ log 'present(tag("unknown"))'
- $ log 'present(tag("re:unknown"))'
- $ log 'branch(unknown)'
- abort: unknown revision 'unknown'!
- [255]
- $ log 'branch("literal:unknown")'
- abort: branch 'unknown' does not exist!
- [255]
- $ log 'branch("re:unknown")'
- $ log 'present(branch("unknown"))'
- $ log 'present(branch("re:unknown"))'
- $ log 'user(bob)'
- 2
-
- $ log '4::8'
- 4
- 8
- $ log '4:8'
- 4
- 5
- 6
- 7
- 8
-
- $ log 'sort(!merge() & (modifies(b) | user(bob) | keyword(bug) | keyword(issue) & 1::9), "-date")'
- 4
- 2
- 5
-
- $ log 'not 0 and 0:2'
- 1
- 2
- $ log 'not 1 and 0:2'
- 0
- 2
- $ log 'not 2 and 0:2'
- 0
- 1
- $ log '(1 and 2)::'
- $ log '(1 and 2):'
- $ log '(1 and 2):3'
- $ log 'sort(head(), -rev)'
- 9
- 7
- 6
- 5
- 4
- 3
- 2
- 1
- 0
- $ log '4::8 - 8'
- 4
-
-matching() should preserve the order of the input set:
-
- $ log '(2 or 3 or 1) and matching(1 or 2 or 3)'
- 2
- 3
- 1
-
- $ log 'named("unknown")'
- abort: namespace 'unknown' does not exist!
- [255]
- $ log 'named("re:unknown")'
- abort: no namespace exists that match 'unknown'!
- [255]
- $ log 'present(named("unknown"))'
- $ log 'present(named("re:unknown"))'
-
- $ log 'tag()'
- 6
- $ log 'named("tags")'
- 6
-
-issue2437
-
- $ log '3 and p1(5)'
- 3
- $ log '4 and p2(6)'
- 4
- $ log '1 and parents(:2)'
- 1
- $ log '2 and children(1:)'
- 2
- $ log 'roots(all()) or roots(all())'
- 0
- $ hg debugrevspec 'roots(all()) or roots(all())'
- 0
- $ log 'heads(branch(é)) or heads(branch(é))'
- 9
- $ log 'ancestors(8) and (heads(branch("-a-b-c-")) or heads(branch(é)))'
- 4
-
-issue2654: report a parse error if the revset was not completely parsed
-
- $ log '1 OR 2'
- hg: parse error at 2: invalid token
- [255]
-
-or operator should preserve ordering:
- $ log 'reverse(2::4) or tip'
- 4
- 2
- 9
-
-parentrevspec
-
- $ log 'merge()^0'
- 6
- $ log 'merge()^'
- 5
- $ log 'merge()^1'
- 5
- $ log 'merge()^2'
- 4
- $ log '(not merge())^2'
- $ log 'merge()^^'
- 3
- $ log 'merge()^1^'
- 3
- $ log 'merge()^^^'
- 1
-
- $ hg debugrevspec -s '(merge() | 0)~-1'
- * set:
- <baseset+ [1, 7]>
- 1
- 7
- $ log 'merge()~-1'
- 7
- $ log 'tip~-1'
- $ log '(tip | merge())~-1'
- 7
- $ log 'merge()~0'
- 6
- $ log 'merge()~1'
- 5
- $ log 'merge()~2'
- 3
- $ log 'merge()~2^1'
- 1
- $ log 'merge()~3'
- 1
-
- $ log '(-3:tip)^'
- 4
- 6
- 8
-
- $ log 'tip^foo'
- hg: parse error: ^ expects a number 0, 1, or 2
- [255]
-
- $ log 'branchpoint()~-1'
- abort: revision in set has more than one child!
- [255]
-
-Bogus function gets suggestions
- $ log 'add()'
- hg: parse error: unknown identifier: add
- (did you mean adds?)
- [255]
- $ log 'added()'
- hg: parse error: unknown identifier: added
- (did you mean adds?)
- [255]
- $ log 'remo()'
- hg: parse error: unknown identifier: remo
- (did you mean one of remote, removes?)
- [255]
- $ log 'babar()'
- hg: parse error: unknown identifier: babar
- [255]
-
-Bogus function with a similar internal name doesn't suggest the internal name
- $ log 'matches()'
- hg: parse error: unknown identifier: matches
- (did you mean matching?)
- [255]
-
-Undocumented functions aren't suggested as similar either
- $ log 'tagged2()'
- hg: parse error: unknown identifier: tagged2
- [255]
-
-multiple revspecs
-
- $ hg log -r 'tip~1:tip' -r 'tip~2:tip~1' --template '{rev}\n'
- 8
- 9
- 4
- 5
- 6
- 7
-
-test usage in revpair (with "+")
-
-(real pair)
-
- $ hg diff -r 'tip^^' -r 'tip'
- diff -r 2326846efdab -r 24286f4ae135 .hgtags
- --- /dev/null Thu Jan 01 00:00:00 1970 +0000
- +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000
- @@ -0,0 +1,1 @@
- +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
- $ hg diff -r 'tip^^::tip'
- diff -r 2326846efdab -r 24286f4ae135 .hgtags
- --- /dev/null Thu Jan 01 00:00:00 1970 +0000
- +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000
- @@ -0,0 +1,1 @@
- +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
-
-(single rev)
-
- $ hg diff -r 'tip^' -r 'tip^'
- $ hg diff -r 'tip^:tip^'
-
-(single rev that does not looks like a range)
-
- $ hg diff -r 'tip^::tip^ or tip^'
- diff -r d5d0dcbdc4d9 .hgtags
- --- /dev/null Thu Jan 01 00:00:00 1970 +0000
- +++ b/.hgtags * (glob)
- @@ -0,0 +1,1 @@
- +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
- $ hg diff -r 'tip^ or tip^'
- diff -r d5d0dcbdc4d9 .hgtags
- --- /dev/null Thu Jan 01 00:00:00 1970 +0000
- +++ b/.hgtags * (glob)
- @@ -0,0 +1,1 @@
- +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
-
-(no rev)
-
- $ hg diff -r 'author("babar") or author("celeste")'
- abort: empty revision range
- [255]
-
-aliases:
-
- $ echo '[revsetalias]' >> .hg/hgrc
- $ echo 'm = merge()' >> .hg/hgrc
-(revset aliases can override builtin revsets)
- $ echo 'p2($1) = p1($1)' >> .hg/hgrc
- $ echo 'sincem = descendants(m)' >> .hg/hgrc
- $ echo 'd($1) = reverse(sort($1, date))' >> .hg/hgrc
- $ echo 'rs(ARG1, ARG2) = reverse(sort(ARG1, ARG2))' >> .hg/hgrc
- $ echo 'rs4(ARG1, ARGA, ARGB, ARG2) = reverse(sort(ARG1, ARG2))' >> .hg/hgrc
-
- $ try m
- ('symbol', 'm')
- * expanded:
- (func
- ('symbol', 'merge')
- None)
- * set:
- <filteredset
- <fullreposet+ 0:10>,
- <merge>>
- 6
-
- $ HGPLAIN=1
- $ export HGPLAIN
- $ try m
- ('symbol', 'm')
- abort: unknown revision 'm'!
- [255]
-
- $ HGPLAINEXCEPT=revsetalias
- $ export HGPLAINEXCEPT
- $ try m
- ('symbol', 'm')
- * expanded:
- (func
- ('symbol', 'merge')
- None)
- * set:
- <filteredset
- <fullreposet+ 0:10>,
- <merge>>
- 6
-
- $ unset HGPLAIN
- $ unset HGPLAINEXCEPT
-
- $ try 'p2(.)'
- (func
- ('symbol', 'p2')
- ('symbol', '.'))
- * expanded:
- (func
- ('symbol', 'p1')
- ('symbol', '.'))
- * set:
- <baseset+ [8]>
- 8
-
- $ HGPLAIN=1
- $ export HGPLAIN
- $ try 'p2(.)'
- (func
- ('symbol', 'p2')
- ('symbol', '.'))
- * set:
- <baseset+ []>
-
- $ HGPLAINEXCEPT=revsetalias
- $ export HGPLAINEXCEPT
- $ try 'p2(.)'
- (func
- ('symbol', 'p2')
- ('symbol', '.'))
- * expanded:
- (func
- ('symbol', 'p1')
- ('symbol', '.'))
- * set:
- <baseset+ [8]>
- 8
-
- $ unset HGPLAIN
- $ unset HGPLAINEXCEPT
-
-test alias recursion
-
- $ try sincem
- ('symbol', 'sincem')
- * expanded:
- (func
- ('symbol', 'descendants')
- (func
- ('symbol', 'merge')
- None))
- * set:
- <generatorset+>
- 6
- 7
-
-test infinite recursion
-
- $ echo 'recurse1 = recurse2' >> .hg/hgrc
- $ echo 'recurse2 = recurse1' >> .hg/hgrc
- $ try recurse1
- ('symbol', 'recurse1')
- hg: parse error: infinite expansion of revset alias "recurse1" detected
- [255]
-
- $ echo 'level1($1, $2) = $1 or $2' >> .hg/hgrc
- $ echo 'level2($1, $2) = level1($2, $1)' >> .hg/hgrc
- $ try "level2(level1(1, 2), 3)"
- (func
- ('symbol', 'level2')
- (list
- (func
- ('symbol', 'level1')
- (list
- ('symbol', '1')
- ('symbol', '2')))
- ('symbol', '3')))
- * expanded:
- (or
- (list
- ('symbol', '3')
- (or
- (list
- ('symbol', '1')
- ('symbol', '2')))))
- * set:
- <addset
- <baseset [3]>,
- <baseset [1, 2]>>
- 3
- 1
- 2
-
-test nesting and variable passing
-
- $ echo 'nested($1) = nested2($1)' >> .hg/hgrc
- $ echo 'nested2($1) = nested3($1)' >> .hg/hgrc
- $ echo 'nested3($1) = max($1)' >> .hg/hgrc
- $ try 'nested(2:5)'
- (func
- ('symbol', 'nested')
- (range
- ('symbol', '2')
- ('symbol', '5')))
- * expanded:
- (func
- ('symbol', 'max')
- (range
- ('symbol', '2')
- ('symbol', '5')))
- * set:
- <baseset
- <max
- <fullreposet+ 0:10>,
- <spanset+ 2:6>>>
- 5
-
-test chained `or` operations are flattened at parsing phase
-
- $ echo 'chainedorops($1, $2, $3) = $1|$2|$3' >> .hg/hgrc
- $ try 'chainedorops(0:1, 1:2, 2:3)'
- (func
- ('symbol', 'chainedorops')
- (list
- (range
- ('symbol', '0')
- ('symbol', '1'))
- (range
- ('symbol', '1')
- ('symbol', '2'))
- (range
- ('symbol', '2')
- ('symbol', '3'))))
- * expanded:
- (or
- (list
- (range
- ('symbol', '0')
- ('symbol', '1'))
- (range
- ('symbol', '1')
- ('symbol', '2'))
- (range
- ('symbol', '2')
- ('symbol', '3'))))
- * set:
- <addset
- <spanset+ 0:2>,
- <addset
- <spanset+ 1:3>,
- <spanset+ 2:4>>>
- 0
- 1
- 2
- 3
-
-test variable isolation, variable placeholders are rewritten as string
-then parsed and matched again as string. Check they do not leak too
-far away.
-
- $ echo 'injectparamasstring = max("$1")' >> .hg/hgrc
- $ echo 'callinjection($1) = descendants(injectparamasstring)' >> .hg/hgrc
- $ try 'callinjection(2:5)'
- (func
- ('symbol', 'callinjection')
- (range
- ('symbol', '2')
- ('symbol', '5')))
- * expanded:
- (func
- ('symbol', 'descendants')
- (func
- ('symbol', 'max')
- ('string', '$1')))
- abort: unknown revision '$1'!
- [255]
-
-test scope of alias expansion: 'universe' is expanded prior to 'shadowall(0)',
-but 'all()' should never be substituted to '0()'.
-
- $ echo 'universe = all()' >> .hg/hgrc
- $ echo 'shadowall(all) = all and universe' >> .hg/hgrc
- $ try 'shadowall(0)'
- (func
- ('symbol', 'shadowall')
- ('symbol', '0'))
- * expanded:
- (and
- ('symbol', '0')
- (func
- ('symbol', 'all')
- None))
- * set:
- <filteredset
- <baseset [0]>,
- <spanset+ 0:10>>
- 0
-
-test unknown reference:
-
- $ try "unknownref(0)" --config 'revsetalias.unknownref($1)=$1:$2'
- (func
- ('symbol', 'unknownref')
- ('symbol', '0'))
- abort: bad definition of revset alias "unknownref": invalid symbol '$2'
- [255]
-
- $ hg debugrevspec --debug --config revsetalias.anotherbadone='branch(' "tip"
- ('symbol', 'tip')
- warning: bad definition of revset alias "anotherbadone": at 7: not a prefix: end
- * set:
- <baseset [9]>
- 9
-
- $ try 'tip'
- ('symbol', 'tip')
- * set:
- <baseset [9]>
- 9
-
- $ hg debugrevspec --debug --config revsetalias.'bad name'='tip' "tip"
- ('symbol', 'tip')
- warning: bad declaration of revset alias "bad name": at 4: invalid token
- * set:
- <baseset [9]>
- 9
- $ echo 'strictreplacing($1, $10) = $10 or desc("$1")' >> .hg/hgrc
- $ try 'strictreplacing("foo", tip)'
- (func
- ('symbol', 'strictreplacing')
- (list
- ('string', 'foo')
- ('symbol', 'tip')))
- * expanded:
- (or
- (list
- ('symbol', 'tip')
- (func
- ('symbol', 'desc')
- ('string', '$1'))))
- * set:
- <addset
- <baseset [9]>,
- <filteredset
- <fullreposet+ 0:10>,
- <desc '$1'>>>
- 9
-
- $ try 'd(2:5)'
- (func
- ('symbol', 'd')
- (range
- ('symbol', '2')
- ('symbol', '5')))
- * expanded:
- (func
- ('symbol', 'reverse')
- (func
- ('symbol', 'sort')
- (list
- (range
- ('symbol', '2')
- ('symbol', '5'))
- ('symbol', 'date'))))
- * set:
- <baseset [4, 5, 3, 2]>
- 4
- 5
- 3
- 2
- $ try 'rs(2 or 3, date)'
- (func
- ('symbol', 'rs')
- (list
- (or
- (list
- ('symbol', '2')
- ('symbol', '3')))
- ('symbol', 'date')))
- * expanded:
- (func
- ('symbol', 'reverse')
- (func
- ('symbol', 'sort')
- (list
- (or
- (list
- ('symbol', '2')
- ('symbol', '3')))
- ('symbol', 'date'))))
- * set:
- <baseset [3, 2]>
- 3
- 2
- $ try 'rs()'
- (func
- ('symbol', 'rs')
- None)
- hg: parse error: invalid number of arguments: 0
- [255]
- $ try 'rs(2)'
- (func
- ('symbol', 'rs')
- ('symbol', '2'))
- hg: parse error: invalid number of arguments: 1
- [255]
- $ try 'rs(2, data, 7)'
- (func
- ('symbol', 'rs')
- (list
- ('symbol', '2')
- ('symbol', 'data')
- ('symbol', '7')))
- hg: parse error: invalid number of arguments: 3
- [255]
- $ try 'rs4(2 or 3, x, x, date)'
- (func
- ('symbol', 'rs4')
- (list
- (or
- (list
- ('symbol', '2')
- ('symbol', '3')))
- ('symbol', 'x')
- ('symbol', 'x')
- ('symbol', 'date')))
- * expanded:
- (func
- ('symbol', 'reverse')
- (func
- ('symbol', 'sort')
- (list
- (or
- (list
- ('symbol', '2')
- ('symbol', '3')))
- ('symbol', 'date'))))
- * set:
- <baseset [3, 2]>
- 3
- 2
-
-issue4553: check that revset aliases override existing hash prefix
-
- $ hg log -qr e
- 6:e0cc66ef77e8
-
- $ hg log -qr e --config revsetalias.e="all()"
- 0:2785f51eece5
- 1:d75937da8da0
- 2:5ed5505e9f1c
- 3:8528aa5637f2
- 4:2326846efdab
- 5:904fa392b941
- 6:e0cc66ef77e8
- 7:013af1973af4
- 8:d5d0dcbdc4d9
- 9:24286f4ae135
-
- $ hg log -qr e: --config revsetalias.e="0"
- 0:2785f51eece5
- 1:d75937da8da0
- 2:5ed5505e9f1c
- 3:8528aa5637f2
- 4:2326846efdab
- 5:904fa392b941
- 6:e0cc66ef77e8
- 7:013af1973af4
- 8:d5d0dcbdc4d9
- 9:24286f4ae135
-
- $ hg log -qr :e --config revsetalias.e="9"
- 0:2785f51eece5
- 1:d75937da8da0
- 2:5ed5505e9f1c
- 3:8528aa5637f2
- 4:2326846efdab
- 5:904fa392b941
- 6:e0cc66ef77e8
- 7:013af1973af4
- 8:d5d0dcbdc4d9
- 9:24286f4ae135
-
- $ hg log -qr e:
- 6:e0cc66ef77e8
- 7:013af1973af4
- 8:d5d0dcbdc4d9
- 9:24286f4ae135
-
- $ hg log -qr :e
- 0:2785f51eece5
- 1:d75937da8da0
- 2:5ed5505e9f1c
- 3:8528aa5637f2
- 4:2326846efdab
- 5:904fa392b941
- 6:e0cc66ef77e8
-
-issue2549 - correct optimizations
-
- $ try 'limit(1 or 2 or 3, 2) and not 2'
- (and
- (func
- ('symbol', 'limit')
- (list
- (or
- (list
- ('symbol', '1')
- ('symbol', '2')
- ('symbol', '3')))
- ('symbol', '2')))
- (not
- ('symbol', '2')))
- * set:
- <filteredset
- <baseset [1, 2]>,
- <not
- <baseset [2]>>>
- 1
- $ try 'max(1 or 2) and not 2'
- (and
- (func
- ('symbol', 'max')
- (or
- (list
- ('symbol', '1')
- ('symbol', '2'))))
- (not
- ('symbol', '2')))
- * set:
- <filteredset
- <baseset
- <max
- <fullreposet+ 0:10>,
- <baseset [1, 2]>>>,
- <not
- <baseset [2]>>>
- $ try 'min(1 or 2) and not 1'
- (and
- (func
- ('symbol', 'min')
- (or
- (list
- ('symbol', '1')
- ('symbol', '2'))))
- (not
- ('symbol', '1')))
- * set:
- <filteredset
- <baseset
- <min
- <fullreposet+ 0:10>,
- <baseset [1, 2]>>>,
- <not
- <baseset [1]>>>
- $ try 'last(1 or 2, 1) and not 2'
- (and
- (func
- ('symbol', 'last')
- (list
- (or
- (list
- ('symbol', '1')
- ('symbol', '2')))
- ('symbol', '1')))
- (not
- ('symbol', '2')))
- * set:
- <filteredset
- <baseset [2]>,
- <not
- <baseset [2]>>>
-
-issue4289 - ordering of built-ins
- $ hg log -M -q -r 3:2
- 3:8528aa5637f2
- 2:5ed5505e9f1c
-
-test revsets started with 40-chars hash (issue3669)
-
- $ ISSUE3669_TIP=`hg tip --template '{node}'`
- $ hg log -r "${ISSUE3669_TIP}" --template '{rev}\n'
- 9
- $ hg log -r "${ISSUE3669_TIP}^" --template '{rev}\n'
- 8
-
-test or-ed indirect predicates (issue3775)
-
- $ log '6 or 6^1' | sort
- 5
- 6
- $ log '6^1 or 6' | sort
- 5
- 6
- $ log '4 or 4~1' | sort
- 2
- 4
- $ log '4~1 or 4' | sort
- 2
- 4
- $ log '(0 or 2):(4 or 6) or 0 or 6' | sort
- 0
- 1
- 2
- 3
- 4
- 5
- 6
- $ log '0 or 6 or (0 or 2):(4 or 6)' | sort
- 0
- 1
- 2
- 3
- 4
- 5
- 6
-
-tests for 'remote()' predicate:
-#.  (csets in remote) (id)            (remote)
-1.  less than local   current branch  "default"
-2.  same with local   specified       "default"
-3.  more than local   specified       specified
-
- $ hg clone --quiet -U . ../remote3
- $ cd ../remote3
- $ hg update -q 7
- $ echo r > r
- $ hg ci -Aqm 10
- $ log 'remote()'
- 7
- $ log 'remote("a-b-c-")'
- 2
- $ cd ../repo
- $ log 'remote(".a.b.c.", "../remote3")'
-
-tests for concatenation of strings/symbols by "##"
-
- $ try "278 ## '5f5' ## 1ee ## 'ce5'"
- (_concat
- (_concat
- (_concat
- ('symbol', '278')
- ('string', '5f5'))
- ('symbol', '1ee'))
- ('string', 'ce5'))
- * concatenated:
- ('string', '2785f51eece5')
- * set:
- <baseset [0]>
- 0
-
- $ echo 'cat4($1, $2, $3, $4) = $1 ## $2 ## $3 ## $4' >> .hg/hgrc
- $ try "cat4(278, '5f5', 1ee, 'ce5')"
- (func
- ('symbol', 'cat4')
- (list
- ('symbol', '278')
- ('string', '5f5')
- ('symbol', '1ee')
- ('string', 'ce5')))
- * expanded:
- (_concat
- (_concat
- (_concat
- ('symbol', '278')
- ('string', '5f5'))
- ('symbol', '1ee'))
- ('string', 'ce5'))
- * concatenated:
- ('string', '2785f51eece5')
- * set:
- <baseset [0]>
- 0
-
-(check concatenation in alias nesting)
-
- $ echo 'cat2($1, $2) = $1 ## $2' >> .hg/hgrc
- $ echo 'cat2x2($1, $2, $3, $4) = cat2($1 ## $2, $3 ## $4)' >> .hg/hgrc
- $ log "cat2x2(278, '5f5', 1ee, 'ce5')"
- 0
-
-(check operator priority)
-
- $ echo 'cat2n2($1, $2, $3, $4) = $1 ## $2 or $3 ## $4~2' >> .hg/hgrc
- $ log "cat2n2(2785f5, 1eece5, 24286f, 4ae135)"
- 0
- 4
-
- $ cd ..
-
-prepare repository that has "default" branches of multiple roots
-
- $ hg init namedbranch
- $ cd namedbranch
-
- $ echo default0 >> a
- $ hg ci -Aqm0
- $ echo default1 >> a
- $ hg ci -m1
-
- $ hg branch -q stable
- $ echo stable2 >> a
- $ hg ci -m2
- $ echo stable3 >> a
- $ hg ci -m3
-
- $ hg update -q null
- $ echo default4 >> a
- $ hg ci -Aqm4
- $ echo default5 >> a
- $ hg ci -m5
-
-"null" revision belongs to "default" branch (issue4683)
-
- $ log 'branch(null)'
- 0
- 1
- 4
- 5
-
-"null" revision belongs to "default" branch, but it shouldn't appear in set
-unless explicitly specified (issue4682)
-
- $ log 'children(branch(default))'
- 1
- 2
- 5
-
- $ cd ..
-
-test author/desc/keyword in problematic encoding
-# unicode: cp932:
-# u30A2 0x83 0x41(= 'A')
-# u30C2 0x83 0x61(= 'a')
-
- $ hg init problematicencoding
- $ cd problematicencoding
-
- $ $PYTHON > setup.sh <<EOF
- > print u'''
- > echo a > text
- > hg add text
- > hg --encoding utf-8 commit -u '\u30A2' -m none
- > echo b > text
- > hg --encoding utf-8 commit -u '\u30C2' -m none
- > echo c > text
- > hg --encoding utf-8 commit -u none -m '\u30A2'
- > echo d > text
- > hg --encoding utf-8 commit -u none -m '\u30C2'
- > '''.encode('utf-8')
- > EOF
- $ sh < setup.sh
-
-test in problematic encoding
- $ $PYTHON > test.sh <<EOF
- > print u'''
- > hg --encoding cp932 log --template '{rev}\\n' -r 'author(\u30A2)'
- > echo ====
- > hg --encoding cp932 log --template '{rev}\\n' -r 'author(\u30C2)'
- > echo ====
- > hg --encoding cp932 log --template '{rev}\\n' -r 'desc(\u30A2)'
- > echo ====
- > hg --encoding cp932 log --template '{rev}\\n' -r 'desc(\u30C2)'
- > echo ====
- > hg --encoding cp932 log --template '{rev}\\n' -r 'keyword(\u30A2)'
- > echo ====
- > hg --encoding cp932 log --template '{rev}\\n' -r 'keyword(\u30C2)'
- > '''.encode('cp932')
- > EOF
- $ sh < test.sh
- 0
- ====
- 1
- ====
- 2
- ====
- 3
- ====
- 0
- 2
- ====
- 1
- 3
-
-test error message of bad revset
- $ hg log -r 'foo\\'
- hg: parse error at 3: syntax error in revset 'foo\\'
- [255]
-
- $ cd ..
-
-Test that revset predicate of extension isn't loaded at failure of
-loading it
-
- $ cd repo
-
- $ cat <<EOF > $TESTTMP/custompredicate.py
- > from mercurial import error, registrar, revset
- >
- > revsetpredicate = registrar.revsetpredicate()
- >
- > @revsetpredicate('custom1()')
- > def custom1(repo, subset, x):
- > return revset.baseset([1])
- >
- > raise error.Abort('intentional failure of loading extension')
- > EOF
- $ cat <<EOF > .hg/hgrc
- > [extensions]
- > custompredicate = $TESTTMP/custompredicate.py
- > EOF
-
- $ hg debugrevspec "custom1()"
- *** failed to import extension custompredicate from $TESTTMP/custompredicate.py: intentional failure of loading extension
- hg: parse error: unknown identifier: custom1
- [255]
-
-Test repo.anyrevs with customized revset overrides
-
- $ cat > $TESTTMP/printprevset.py <<EOF
- > from mercurial import encoding
- > def reposetup(ui, repo):
- > alias = {}
- > p = encoding.environ.get('P')
- > if p:
- > alias['P'] = p
- > revs = repo.anyrevs(['P'], user=True, localalias=alias)
- > ui.write('P=%r' % list(revs))
- > EOF
-
- $ cat >> .hg/hgrc <<EOF
- > custompredicate = !
- > printprevset = $TESTTMP/printprevset.py
- > EOF
-
- $ hg --config revsetalias.P=1 log -r . -T '\n'
- P=[1]
- $ P=3 hg --config revsetalias.P=2 log -r . -T '\n'
- P=[3]
-
- $ cd ..
-
-Test obsstore related revsets
-
- $ hg init repo1
- $ cd repo1
- $ cat <<EOF >> .hg/hgrc
- > [experimental]
- > evolution = createmarkers
- > EOF
-
- $ hg debugdrawdag <<'EOS'
- > F G
- > |/ # split: B -> E, F
- > B C D E # amend: B -> C -> D
- > \|/ | # amend: F -> G
- > A A Z # amend: A -> Z
- > EOS
-
- $ hg log -r 'successors(Z)' -T '{desc}\n'
- Z
-
- $ hg log -r 'successors(F)' -T '{desc}\n'
- F
- G
-
- $ hg tag --remove --local C D E F G
-
- $ hg log -r 'successors(B)' -T '{desc}\n'
- B
- D
- E
- G
-
- $ hg log -r 'successors(B)' -T '{desc}\n' --hidden
- B
- C
- D
- E
- F
- G
-
- $ hg log -r 'successors(B)-obsolete()' -T '{desc}\n' --hidden
- D
- E
- G
-
- $ hg log -r 'successors(B+A)-divergent()' -T '{desc}\n'
- A
- Z
- B
-
- $ hg log -r 'successors(B+A)-divergent()-obsolete()' -T '{desc}\n'
- Z
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-revset2.t Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,1818 @@
+ $ HGENCODING=utf-8
+ $ export HGENCODING
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > drawdag=$TESTDIR/drawdag.py
+ > EOF
+
+ $ try() {
+ > hg debugrevspec --debug "$@"
+ > }
+
+ $ log() {
+ > hg log --template '{rev}\n' -r "$1"
+ > }
+
+ $ hg init repo
+ $ cd repo
+
+ $ echo a > a
+ $ hg branch a
+ marked working directory as branch a
+ (branches are permanent and global, did you want a bookmark?)
+ $ hg ci -Aqm0
+
+ $ echo b > b
+ $ hg branch b
+ marked working directory as branch b
+ $ hg ci -Aqm1
+
+ $ rm a
+ $ hg branch a-b-c-
+ marked working directory as branch a-b-c-
+ $ hg ci -Aqm2 -u Bob
+
+ $ hg log -r "extra('branch', 'a-b-c-')" --template '{rev}\n'
+ 2
+ $ hg log -r "extra('branch')" --template '{rev}\n'
+ 0
+ 1
+ 2
+ $ hg log -r "extra('branch', 're:a')" --template '{rev} {branch}\n'
+ 0 a
+ 2 a-b-c-
+
+ $ hg co 1
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg branch +a+b+c+
+ marked working directory as branch +a+b+c+
+ $ hg ci -Aqm3
+
+ $ hg co 2 # interleave
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ echo bb > b
+ $ hg branch -- -a-b-c-
+ marked working directory as branch -a-b-c-
+ $ hg ci -Aqm4 -d "May 12 2005"
+
+ $ hg co 3
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg branch !a/b/c/
+ marked working directory as branch !a/b/c/
+ $ hg ci -Aqm"5 bug"
+
+ $ hg merge 4
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg branch _a_b_c_
+ marked working directory as branch _a_b_c_
+ $ hg ci -Aqm"6 issue619"
+
+ $ hg branch .a.b.c.
+ marked working directory as branch .a.b.c.
+ $ hg ci -Aqm7
+
+ $ hg branch all
+ marked working directory as branch all
+
+ $ hg co 4
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg branch é
+ marked working directory as branch \xc3\xa9 (esc)
+ $ hg ci -Aqm9
+
+ $ hg tag -r6 1.0
+ $ hg bookmark -r6 xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+
+ $ hg clone --quiet -U -r 7 . ../remote1
+ $ hg clone --quiet -U -r 8 . ../remote2
+ $ echo "[paths]" >> .hg/hgrc
+ $ echo "default = ../remote1" >> .hg/hgrc
+
+test subtracting something from an addset
+
+ $ log '(outgoing() or removes(a)) - removes(a)'
+ 8
+ 9
+
+test intersecting something with an addset
+
+ $ log 'parents(outgoing() or removes(a))'
+ 1
+ 4
+ 5
+ 8
+
+test that `or` operation combines elements in the right order:
+
+ $ log '3:4 or 2:5'
+ 3
+ 4
+ 2
+ 5
+ $ log '3:4 or 5:2'
+ 3
+ 4
+ 5
+ 2
+ $ log 'sort(3:4 or 2:5)'
+ 2
+ 3
+ 4
+ 5
+ $ log 'sort(3:4 or 5:2)'
+ 2
+ 3
+ 4
+ 5
+
+test that multiple `-r` options are combined in the right order and deduplicated:
+
+ $ hg log -T '{rev}\n' -r 3 -r 3 -r 4 -r 5:2 -r 'ancestors(4)'
+ 3
+ 4
+ 5
+ 2
+ 0
+ 1
+
+test that `or` operation skips duplicated revisions from right-hand side
+
+ $ try 'reverse(1::5) or ancestors(4)'
+ (or
+ (list
+ (func
+ (symbol 'reverse')
+ (dagrange
+ (symbol '1')
+ (symbol '5')))
+ (func
+ (symbol 'ancestors')
+ (symbol '4'))))
+ * set:
+ <addset
+ <baseset- [1, 3, 5]>,
+ <generatorset+>>
+ 5
+ 3
+ 1
+ 0
+ 2
+ 4
+ $ try 'sort(ancestors(4) or reverse(1::5))'
+ (func
+ (symbol 'sort')
+ (or
+ (list
+ (func
+ (symbol 'ancestors')
+ (symbol '4'))
+ (func
+ (symbol 'reverse')
+ (dagrange
+ (symbol '1')
+ (symbol '5'))))))
+ * set:
+ <addset+
+ <generatorset+>,
+ <baseset- [1, 3, 5]>>
+ 0
+ 1
+ 2
+ 3
+ 4
+ 5
+
+test optimization of trivial `or` operation
+
+ $ try --optimize '0|(1)|"2"|-2|tip|null'
+ (or
+ (list
+ (symbol '0')
+ (group
+ (symbol '1'))
+ (string '2')
+ (negate
+ (symbol '2'))
+ (symbol 'tip')
+ (symbol 'null')))
+ * optimized:
+ (func
+ (symbol '_list')
+ (string '0\x001\x002\x00-2\x00tip\x00null'))
+ * set:
+ <baseset [0, 1, 2, 8, 9, -1]>
+ 0
+ 1
+ 2
+ 8
+ 9
+ -1
+
+ $ try --optimize '0|1|2:3'
+ (or
+ (list
+ (symbol '0')
+ (symbol '1')
+ (range
+ (symbol '2')
+ (symbol '3'))))
+ * optimized:
+ (or
+ (list
+ (func
+ (symbol '_list')
+ (string '0\x001'))
+ (range
+ (symbol '2')
+ (symbol '3'))))
+ * set:
+ <addset
+ <baseset [0, 1]>,
+ <spanset+ 2:4>>
+ 0
+ 1
+ 2
+ 3
+
+ $ try --optimize '0:1|2|3:4|5|6'
+ (or
+ (list
+ (range
+ (symbol '0')
+ (symbol '1'))
+ (symbol '2')
+ (range
+ (symbol '3')
+ (symbol '4'))
+ (symbol '5')
+ (symbol '6')))
+ * optimized:
+ (or
+ (list
+ (range
+ (symbol '0')
+ (symbol '1'))
+ (symbol '2')
+ (range
+ (symbol '3')
+ (symbol '4'))
+ (func
+ (symbol '_list')
+ (string '5\x006'))))
+ * set:
+ <addset
+ <addset
+ <spanset+ 0:2>,
+ <baseset [2]>>,
+ <addset
+ <spanset+ 3:5>,
+ <baseset [5, 6]>>>
+ 0
+ 1
+ 2
+ 3
+ 4
+ 5
+ 6
+
+unoptimized `or` looks like this
+
+ $ try --no-optimized -p analyzed '0|1|2|3|4'
+ * analyzed:
+ (or
+ (list
+ (symbol '0')
+ (symbol '1')
+ (symbol '2')
+ (symbol '3')
+ (symbol '4')))
+ * set:
+ <addset
+ <addset
+ <baseset [0]>,
+ <baseset [1]>>,
+ <addset
+ <baseset [2]>,
+ <addset
+ <baseset [3]>,
+ <baseset [4]>>>>
+ 0
+ 1
+ 2
+ 3
+ 4
+
+test that `_list` should be narrowed by provided `subset`
+
+ $ log '0:2 and (null|1|2|3)'
+ 1
+ 2
+
+test that `_list` should remove duplicates
+
+ $ log '0|1|2|1|2|-1|tip'
+ 0
+ 1
+ 2
+ 9
+
+test unknown revision in `_list`
+
+ $ log '0|unknown'
+ abort: unknown revision 'unknown'!
+ [255]
+
+test integer range in `_list`
+
+ $ log '-1|-10'
+ 9
+ 0
+
+ $ log '-10|-11'
+ abort: unknown revision '-11'!
+ [255]
+
+ $ log '9|10'
+ abort: unknown revision '10'!
+ [255]
+
+test '0000' != '0' in `_list`
+
+ $ log '0|0000'
+ 0
+ -1
+
+test ',' in `_list`
+ $ log '0,1'
+ hg: parse error: can't use a list in this context
+ (see hg help "revsets.x or y")
+ [255]
+ $ try '0,1,2'
+ (list
+ (symbol '0')
+ (symbol '1')
+ (symbol '2'))
+ hg: parse error: can't use a list in this context
+ (see hg help "revsets.x or y")
+ [255]
+
+test that chained `or` operations make balanced addsets
+
+ $ try '0:1|1:2|2:3|3:4|4:5'
+ (or
+ (list
+ (range
+ (symbol '0')
+ (symbol '1'))
+ (range
+ (symbol '1')
+ (symbol '2'))
+ (range
+ (symbol '2')
+ (symbol '3'))
+ (range
+ (symbol '3')
+ (symbol '4'))
+ (range
+ (symbol '4')
+ (symbol '5'))))
+ * set:
+ <addset
+ <addset
+ <spanset+ 0:2>,
+ <spanset+ 1:3>>,
+ <addset
+ <spanset+ 2:4>,
+ <addset
+ <spanset+ 3:5>,
+ <spanset+ 4:6>>>>
+ 0
+ 1
+ 2
+ 3
+ 4
+ 5
+
+no crash on empty group "()" while optimizing `or` operations
+
+ $ try --optimize '0|()'
+ (or
+ (list
+ (symbol '0')
+ (group
+ None)))
+ * optimized:
+ (or
+ (list
+ (symbol '0')
+ None))
+ hg: parse error: missing argument
+ [255]
+
+test that chained `or` operations never eat up stack (issue4624)
+(uses `0:1` instead of `0` to avoid future optimization of trivial revisions)
+
+ $ hg log -T '{rev}\n' -r `$PYTHON -c "print '+'.join(['0:1'] * 500)"`
+ 0
+ 1
+
+test that repeated `-r` options never eat up stack (issue4565)
+(uses `-r 0::1` to avoid possible optimization at old-style parser)
+
+ $ hg log -T '{rev}\n' `$PYTHON -c "for i in xrange(500): print '-r 0::1 ',"`
+ 0
+ 1
+
+check that conversion to only() works
+ $ try --optimize '::3 - ::1'
+ (minus
+ (dagrangepre
+ (symbol '3'))
+ (dagrangepre
+ (symbol '1')))
+ * optimized:
+ (func
+ (symbol 'only')
+ (list
+ (symbol '3')
+ (symbol '1')))
+ * set:
+ <baseset+ [3]>
+ 3
+ $ try --optimize 'ancestors(1) - ancestors(3)'
+ (minus
+ (func
+ (symbol 'ancestors')
+ (symbol '1'))
+ (func
+ (symbol 'ancestors')
+ (symbol '3')))
+ * optimized:
+ (func
+ (symbol 'only')
+ (list
+ (symbol '1')
+ (symbol '3')))
+ * set:
+ <baseset+ []>
+ $ try --optimize 'not ::2 and ::6'
+ (and
+ (not
+ (dagrangepre
+ (symbol '2')))
+ (dagrangepre
+ (symbol '6')))
+ * optimized:
+ (func
+ (symbol 'only')
+ (list
+ (symbol '6')
+ (symbol '2')))
+ * set:
+ <baseset+ [3, 4, 5, 6]>
+ 3
+ 4
+ 5
+ 6
+ $ try --optimize 'ancestors(6) and not ancestors(4)'
+ (and
+ (func
+ (symbol 'ancestors')
+ (symbol '6'))
+ (not
+ (func
+ (symbol 'ancestors')
+ (symbol '4'))))
+ * optimized:
+ (func
+ (symbol 'only')
+ (list
+ (symbol '6')
+ (symbol '4')))
+ * set:
+ <baseset+ [3, 5, 6]>
+ 3
+ 5
+ 6
+
+no crash on empty group "()" while optimizing to "only()"
+
+ $ try --optimize '::1 and ()'
+ (and
+ (dagrangepre
+ (symbol '1'))
+ (group
+ None))
+ * optimized:
+ (andsmally
+ (func
+ (symbol 'ancestors')
+ (symbol '1'))
+ None)
+ hg: parse error: missing argument
+ [255]
+
+optimization to only() works only if ancestors() takes only one argument
+
+ $ hg debugrevspec -p optimized 'ancestors(6) - ancestors(4, 1)'
+ * optimized:
+ (difference
+ (func
+ (symbol 'ancestors')
+ (symbol '6'))
+ (func
+ (symbol 'ancestors')
+ (list
+ (symbol '4')
+ (symbol '1'))))
+ 0
+ 1
+ 3
+ 5
+ 6
+ $ hg debugrevspec -p optimized 'ancestors(6, 1) - ancestors(4)'
+ * optimized:
+ (difference
+ (func
+ (symbol 'ancestors')
+ (list
+ (symbol '6')
+ (symbol '1')))
+ (func
+ (symbol 'ancestors')
+ (symbol '4')))
+ 5
+ 6
+
+optimization disabled if keyword arguments are passed (because we're too lazy
+to support it)
+
+ $ hg debugrevspec -p optimized 'ancestors(set=6) - ancestors(set=4)'
+ * optimized:
+ (difference
+ (func
+ (symbol 'ancestors')
+ (keyvalue
+ (symbol 'set')
+ (symbol '6')))
+ (func
+ (symbol 'ancestors')
+ (keyvalue
+ (symbol 'set')
+ (symbol '4'))))
+ 3
+ 5
+ 6
+
+invalid function call should not be optimized to only()
+
+ $ log '"ancestors"(6) and not ancestors(4)'
+ hg: parse error: not a symbol
+ [255]
+
+ $ log 'ancestors(6) and not "ancestors"(4)'
+ hg: parse error: not a symbol
+ [255]
+
+we can use patterns when searching for tags
+
+ $ log 'tag("1..*")'
+ abort: tag '1..*' does not exist!
+ [255]
+ $ log 'tag("re:1..*")'
+ 6
+ $ log 'tag("re:[0-9].[0-9]")'
+ 6
+ $ log 'tag("literal:1.0")'
+ 6
+ $ log 'tag("re:0..*")'
+
+ $ log 'tag(unknown)'
+ abort: tag 'unknown' does not exist!
+ [255]
+ $ log 'tag("re:unknown")'
+ $ log 'present(tag("unknown"))'
+ $ log 'present(tag("re:unknown"))'
+ $ log 'branch(unknown)'
+ abort: unknown revision 'unknown'!
+ [255]
+ $ log 'branch("literal:unknown")'
+ abort: branch 'unknown' does not exist!
+ [255]
+ $ log 'branch("re:unknown")'
+ $ log 'present(branch("unknown"))'
+ $ log 'present(branch("re:unknown"))'
+ $ log 'user(bob)'
+ 2
+
+ $ log '4::8'
+ 4
+ 8
+ $ log '4:8'
+ 4
+ 5
+ 6
+ 7
+ 8
+
+ $ log 'sort(!merge() & (modifies(b) | user(bob) | keyword(bug) | keyword(issue) & 1::9), "-date")'
+ 4
+ 2
+ 5
+
+ $ log 'not 0 and 0:2'
+ 1
+ 2
+ $ log 'not 1 and 0:2'
+ 0
+ 2
+ $ log 'not 2 and 0:2'
+ 0
+ 1
+ $ log '(1 and 2)::'
+ $ log '(1 and 2):'
+ $ log '(1 and 2):3'
+ $ log 'sort(head(), -rev)'
+ 9
+ 7
+ 6
+ 5
+ 4
+ 3
+ 2
+ 1
+ 0
+ $ log '4::8 - 8'
+ 4
+
+matching() should preserve the order of the input set:
+
+ $ log '(2 or 3 or 1) and matching(1 or 2 or 3)'
+ 2
+ 3
+ 1
+
+ $ log 'named("unknown")'
+ abort: namespace 'unknown' does not exist!
+ [255]
+ $ log 'named("re:unknown")'
+ abort: no namespace exists that match 'unknown'!
+ [255]
+ $ log 'present(named("unknown"))'
+ $ log 'present(named("re:unknown"))'
+
+ $ log 'tag()'
+ 6
+ $ log 'named("tags")'
+ 6
+
+issue2437
+
+ $ log '3 and p1(5)'
+ 3
+ $ log '4 and p2(6)'
+ 4
+ $ log '1 and parents(:2)'
+ 1
+ $ log '2 and children(1:)'
+ 2
+ $ log 'roots(all()) or roots(all())'
+ 0
+ $ hg debugrevspec 'roots(all()) or roots(all())'
+ 0
+ $ log 'heads(branch(é)) or heads(branch(é))'
+ 9
+ $ log 'ancestors(8) and (heads(branch("-a-b-c-")) or heads(branch(é)))'
+ 4
+
+issue2654: report a parse error if the revset was not completely parsed
+
+ $ log '1 OR 2'
+ hg: parse error at 2: invalid token
+ [255]
+
+or operator should preserve ordering:
+ $ log 'reverse(2::4) or tip'
+ 4
+ 2
+ 9
+
+parentrevspec
+
+ $ log 'merge()^0'
+ 6
+ $ log 'merge()^'
+ 5
+ $ log 'merge()^1'
+ 5
+ $ log 'merge()^2'
+ 4
+ $ log '(not merge())^2'
+ $ log 'merge()^^'
+ 3
+ $ log 'merge()^1^'
+ 3
+ $ log 'merge()^^^'
+ 1
+
+ $ hg debugrevspec -s '(merge() | 0)~-1'
+ * set:
+ <baseset+ [1, 7]>
+ 1
+ 7
+ $ log 'merge()~-1'
+ 7
+ $ log 'tip~-1'
+ $ log '(tip | merge())~-1'
+ 7
+ $ log 'merge()~0'
+ 6
+ $ log 'merge()~1'
+ 5
+ $ log 'merge()~2'
+ 3
+ $ log 'merge()~2^1'
+ 1
+ $ log 'merge()~3'
+ 1
+
+ $ log '(-3:tip)^'
+ 4
+ 6
+ 8
+
+ $ log 'tip^foo'
+ hg: parse error: ^ expects a number 0, 1, or 2
+ [255]
+
+ $ log 'branchpoint()~-1'
+ abort: revision in set has more than one child!
+ [255]
+
+Bogus function gets suggestions
+ $ log 'add()'
+ hg: parse error: unknown identifier: add
+ (did you mean adds?)
+ [255]
+ $ log 'added()'
+ hg: parse error: unknown identifier: added
+ (did you mean adds?)
+ [255]
+ $ log 'remo()'
+ hg: parse error: unknown identifier: remo
+ (did you mean one of remote, removes?)
+ [255]
+ $ log 'babar()'
+ hg: parse error: unknown identifier: babar
+ [255]
+
+Bogus function with a similar internal name doesn't suggest the internal name
+ $ log 'matches()'
+ hg: parse error: unknown identifier: matches
+ (did you mean matching?)
+ [255]
+
+Undocumented functions aren't suggested as similar either
+ $ log 'tagged2()'
+ hg: parse error: unknown identifier: tagged2
+ [255]
+
+multiple revspecs
+
+ $ hg log -r 'tip~1:tip' -r 'tip~2:tip~1' --template '{rev}\n'
+ 8
+ 9
+ 4
+ 5
+ 6
+ 7
+
+test usage in revpair (with "+")
+
+(real pair)
+
+ $ hg diff -r 'tip^^' -r 'tip'
+ diff -r 2326846efdab -r 24286f4ae135 .hgtags
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,1 @@
+ +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
+ $ hg diff -r 'tip^^::tip'
+ diff -r 2326846efdab -r 24286f4ae135 .hgtags
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,1 @@
+ +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
+
+(single rev)
+
+ $ hg diff -r 'tip^' -r 'tip^'
+ $ hg diff -r 'tip^:tip^'
+
+(single rev that does not look like a range)
+
+ $ hg diff -r 'tip^::tip^ or tip^'
+ diff -r d5d0dcbdc4d9 .hgtags
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/.hgtags * (glob)
+ @@ -0,0 +1,1 @@
+ +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
+ $ hg diff -r 'tip^ or tip^'
+ diff -r d5d0dcbdc4d9 .hgtags
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/.hgtags * (glob)
+ @@ -0,0 +1,1 @@
+ +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0
+
+(no rev)
+
+ $ hg diff -r 'author("babar") or author("celeste")'
+ abort: empty revision range
+ [255]
+
+aliases:
+
+ $ echo '[revsetalias]' >> .hg/hgrc
+ $ echo 'm = merge()' >> .hg/hgrc
+(revset aliases can override builtin revsets)
+ $ echo 'p2($1) = p1($1)' >> .hg/hgrc
+ $ echo 'sincem = descendants(m)' >> .hg/hgrc
+ $ echo 'd($1) = reverse(sort($1, date))' >> .hg/hgrc
+ $ echo 'rs(ARG1, ARG2) = reverse(sort(ARG1, ARG2))' >> .hg/hgrc
+ $ echo 'rs4(ARG1, ARGA, ARGB, ARG2) = reverse(sort(ARG1, ARG2))' >> .hg/hgrc
+
+ $ try m
+ (symbol 'm')
+ * expanded:
+ (func
+ (symbol 'merge')
+ None)
+ * set:
+ <filteredset
+ <fullreposet+ 0:10>,
+ <merge>>
+ 6
+
+ $ HGPLAIN=1
+ $ export HGPLAIN
+ $ try m
+ (symbol 'm')
+ abort: unknown revision 'm'!
+ [255]
+
+ $ HGPLAINEXCEPT=revsetalias
+ $ export HGPLAINEXCEPT
+ $ try m
+ (symbol 'm')
+ * expanded:
+ (func
+ (symbol 'merge')
+ None)
+ * set:
+ <filteredset
+ <fullreposet+ 0:10>,
+ <merge>>
+ 6
+
+ $ unset HGPLAIN
+ $ unset HGPLAINEXCEPT
+
+ $ try 'p2(.)'
+ (func
+ (symbol 'p2')
+ (symbol '.'))
+ * expanded:
+ (func
+ (symbol 'p1')
+ (symbol '.'))
+ * set:
+ <baseset+ [8]>
+ 8
+
+ $ HGPLAIN=1
+ $ export HGPLAIN
+ $ try 'p2(.)'
+ (func
+ (symbol 'p2')
+ (symbol '.'))
+ * set:
+ <baseset+ []>
+
+ $ HGPLAINEXCEPT=revsetalias
+ $ export HGPLAINEXCEPT
+ $ try 'p2(.)'
+ (func
+ (symbol 'p2')
+ (symbol '.'))
+ * expanded:
+ (func
+ (symbol 'p1')
+ (symbol '.'))
+ * set:
+ <baseset+ [8]>
+ 8
+
+ $ unset HGPLAIN
+ $ unset HGPLAINEXCEPT
+
+test alias recursion
+
+ $ try sincem
+ (symbol 'sincem')
+ * expanded:
+ (func
+ (symbol 'descendants')
+ (func
+ (symbol 'merge')
+ None))
+ * set:
+ <generatorset+>
+ 6
+ 7
+
+test infinite recursion
+
+ $ echo 'recurse1 = recurse2' >> .hg/hgrc
+ $ echo 'recurse2 = recurse1' >> .hg/hgrc
+ $ try recurse1
+ (symbol 'recurse1')
+ hg: parse error: infinite expansion of revset alias "recurse1" detected
+ [255]
+
+ $ echo 'level1($1, $2) = $1 or $2' >> .hg/hgrc
+ $ echo 'level2($1, $2) = level1($2, $1)' >> .hg/hgrc
+ $ try "level2(level1(1, 2), 3)"
+ (func
+ (symbol 'level2')
+ (list
+ (func
+ (symbol 'level1')
+ (list
+ (symbol '1')
+ (symbol '2')))
+ (symbol '3')))
+ * expanded:
+ (or
+ (list
+ (symbol '3')
+ (or
+ (list
+ (symbol '1')
+ (symbol '2')))))
+ * set:
+ <addset
+ <baseset [3]>,
+ <baseset [1, 2]>>
+ 3
+ 1
+ 2
+
+test nesting and variable passing
+
+ $ echo 'nested($1) = nested2($1)' >> .hg/hgrc
+ $ echo 'nested2($1) = nested3($1)' >> .hg/hgrc
+ $ echo 'nested3($1) = max($1)' >> .hg/hgrc
+ $ try 'nested(2:5)'
+ (func
+ (symbol 'nested')
+ (range
+ (symbol '2')
+ (symbol '5')))
+ * expanded:
+ (func
+ (symbol 'max')
+ (range
+ (symbol '2')
+ (symbol '5')))
+ * set:
+ <baseset
+ <max
+ <fullreposet+ 0:10>,
+ <spanset+ 2:6>>>
+ 5
+
+test chained `or` operations are flattened at parsing phase
+
+ $ echo 'chainedorops($1, $2, $3) = $1|$2|$3' >> .hg/hgrc
+ $ try 'chainedorops(0:1, 1:2, 2:3)'
+ (func
+ (symbol 'chainedorops')
+ (list
+ (range
+ (symbol '0')
+ (symbol '1'))
+ (range
+ (symbol '1')
+ (symbol '2'))
+ (range
+ (symbol '2')
+ (symbol '3'))))
+ * expanded:
+ (or
+ (list
+ (range
+ (symbol '0')
+ (symbol '1'))
+ (range
+ (symbol '1')
+ (symbol '2'))
+ (range
+ (symbol '2')
+ (symbol '3'))))
+ * set:
+ <addset
+ <spanset+ 0:2>,
+ <addset
+ <spanset+ 1:3>,
+ <spanset+ 2:4>>>
+ 0
+ 1
+ 2
+ 3
+
+test variable isolation: variable placeholders are rewritten as strings,
+then parsed and matched again as strings. Check that they do not leak
+too far away.
+
+ $ echo 'injectparamasstring = max("$1")' >> .hg/hgrc
+ $ echo 'callinjection($1) = descendants(injectparamasstring)' >> .hg/hgrc
+ $ try 'callinjection(2:5)'
+ (func
+ (symbol 'callinjection')
+ (range
+ (symbol '2')
+ (symbol '5')))
+ * expanded:
+ (func
+ (symbol 'descendants')
+ (func
+ (symbol 'max')
+ (string '$1')))
+ abort: unknown revision '$1'!
+ [255]
+
+test scope of alias expansion: 'universe' is expanded prior to 'shadowall(0)',
+but 'all()' should never be substituted to '0()'.
+
+ $ echo 'universe = all()' >> .hg/hgrc
+ $ echo 'shadowall(all) = all and universe' >> .hg/hgrc
+ $ try 'shadowall(0)'
+ (func
+ (symbol 'shadowall')
+ (symbol '0'))
+ * expanded:
+ (and
+ (symbol '0')
+ (func
+ (symbol 'all')
+ None))
+ * set:
+ <filteredset
+ <baseset [0]>,
+ <spanset+ 0:10>>
+ 0
+
+test unknown reference:
+
+ $ try "unknownref(0)" --config 'revsetalias.unknownref($1)=$1:$2'
+ (func
+ (symbol 'unknownref')
+ (symbol '0'))
+ abort: bad definition of revset alias "unknownref": invalid symbol '$2'
+ [255]
+
+ $ hg debugrevspec --debug --config revsetalias.anotherbadone='branch(' "tip"
+ (symbol 'tip')
+ warning: bad definition of revset alias "anotherbadone": at 7: not a prefix: end
+ * set:
+ <baseset [9]>
+ 9
+
+ $ try 'tip'
+ (symbol 'tip')
+ * set:
+ <baseset [9]>
+ 9
+
+ $ hg debugrevspec --debug --config revsetalias.'bad name'='tip' "tip"
+ (symbol 'tip')
+ warning: bad declaration of revset alias "bad name": at 4: invalid token
+ * set:
+ <baseset [9]>
+ 9
+ $ echo 'strictreplacing($1, $10) = $10 or desc("$1")' >> .hg/hgrc
+ $ try 'strictreplacing("foo", tip)'
+ (func
+ (symbol 'strictreplacing')
+ (list
+ (string 'foo')
+ (symbol 'tip')))
+ * expanded:
+ (or
+ (list
+ (symbol 'tip')
+ (func
+ (symbol 'desc')
+ (string '$1'))))
+ * set:
+ <addset
+ <baseset [9]>,
+ <filteredset
+ <fullreposet+ 0:10>,
+ <desc '$1'>>>
+ 9
+
+ $ try 'd(2:5)'
+ (func
+ (symbol 'd')
+ (range
+ (symbol '2')
+ (symbol '5')))
+ * expanded:
+ (func
+ (symbol 'reverse')
+ (func
+ (symbol 'sort')
+ (list
+ (range
+ (symbol '2')
+ (symbol '5'))
+ (symbol 'date'))))
+ * set:
+ <baseset [4, 5, 3, 2]>
+ 4
+ 5
+ 3
+ 2
+ $ try 'rs(2 or 3, date)'
+ (func
+ (symbol 'rs')
+ (list
+ (or
+ (list
+ (symbol '2')
+ (symbol '3')))
+ (symbol 'date')))
+ * expanded:
+ (func
+ (symbol 'reverse')
+ (func
+ (symbol 'sort')
+ (list
+ (or
+ (list
+ (symbol '2')
+ (symbol '3')))
+ (symbol 'date'))))
+ * set:
+ <baseset [3, 2]>
+ 3
+ 2
+ $ try 'rs()'
+ (func
+ (symbol 'rs')
+ None)
+ hg: parse error: invalid number of arguments: 0
+ [255]
+ $ try 'rs(2)'
+ (func
+ (symbol 'rs')
+ (symbol '2'))
+ hg: parse error: invalid number of arguments: 1
+ [255]
+ $ try 'rs(2, data, 7)'
+ (func
+ (symbol 'rs')
+ (list
+ (symbol '2')
+ (symbol 'data')
+ (symbol '7')))
+ hg: parse error: invalid number of arguments: 3
+ [255]
+ $ try 'rs4(2 or 3, x, x, date)'
+ (func
+ (symbol 'rs4')
+ (list
+ (or
+ (list
+ (symbol '2')
+ (symbol '3')))
+ (symbol 'x')
+ (symbol 'x')
+ (symbol 'date')))
+ * expanded:
+ (func
+ (symbol 'reverse')
+ (func
+ (symbol 'sort')
+ (list
+ (or
+ (list
+ (symbol '2')
+ (symbol '3')))
+ (symbol 'date'))))
+ * set:
+ <baseset [3, 2]>
+ 3
+ 2
+
+issue4553: check that revset aliases override existing hash prefix
+
+ $ hg log -qr e
+ 6:e0cc66ef77e8
+
+ $ hg log -qr e --config revsetalias.e="all()"
+ 0:2785f51eece5
+ 1:d75937da8da0
+ 2:5ed5505e9f1c
+ 3:8528aa5637f2
+ 4:2326846efdab
+ 5:904fa392b941
+ 6:e0cc66ef77e8
+ 7:013af1973af4
+ 8:d5d0dcbdc4d9
+ 9:24286f4ae135
+
+ $ hg log -qr e: --config revsetalias.e="0"
+ 0:2785f51eece5
+ 1:d75937da8da0
+ 2:5ed5505e9f1c
+ 3:8528aa5637f2
+ 4:2326846efdab
+ 5:904fa392b941
+ 6:e0cc66ef77e8
+ 7:013af1973af4
+ 8:d5d0dcbdc4d9
+ 9:24286f4ae135
+
+ $ hg log -qr :e --config revsetalias.e="9"
+ 0:2785f51eece5
+ 1:d75937da8da0
+ 2:5ed5505e9f1c
+ 3:8528aa5637f2
+ 4:2326846efdab
+ 5:904fa392b941
+ 6:e0cc66ef77e8
+ 7:013af1973af4
+ 8:d5d0dcbdc4d9
+ 9:24286f4ae135
+
+ $ hg log -qr e:
+ 6:e0cc66ef77e8
+ 7:013af1973af4
+ 8:d5d0dcbdc4d9
+ 9:24286f4ae135
+
+ $ hg log -qr :e
+ 0:2785f51eece5
+ 1:d75937da8da0
+ 2:5ed5505e9f1c
+ 3:8528aa5637f2
+ 4:2326846efdab
+ 5:904fa392b941
+ 6:e0cc66ef77e8
+
+issue2549 - correct optimizations
+
+ $ try 'limit(1 or 2 or 3, 2) and not 2'
+ (and
+ (func
+ (symbol 'limit')
+ (list
+ (or
+ (list
+ (symbol '1')
+ (symbol '2')
+ (symbol '3')))
+ (symbol '2')))
+ (not
+ (symbol '2')))
+ * set:
+ <filteredset
+ <baseset [1, 2]>,
+ <not
+ <baseset [2]>>>
+ 1
+ $ try 'max(1 or 2) and not 2'
+ (and
+ (func
+ (symbol 'max')
+ (or
+ (list
+ (symbol '1')
+ (symbol '2'))))
+ (not
+ (symbol '2')))
+ * set:
+ <filteredset
+ <baseset
+ <max
+ <fullreposet+ 0:10>,
+ <baseset [1, 2]>>>,
+ <not
+ <baseset [2]>>>
+ $ try 'min(1 or 2) and not 1'
+ (and
+ (func
+ (symbol 'min')
+ (or
+ (list
+ (symbol '1')
+ (symbol '2'))))
+ (not
+ (symbol '1')))
+ * set:
+ <filteredset
+ <baseset
+ <min
+ <fullreposet+ 0:10>,
+ <baseset [1, 2]>>>,
+ <not
+ <baseset [1]>>>
+ $ try 'last(1 or 2, 1) and not 2'
+ (and
+ (func
+ (symbol 'last')
+ (list
+ (or
+ (list
+ (symbol '1')
+ (symbol '2')))
+ (symbol '1')))
+ (not
+ (symbol '2')))
+ * set:
+ <filteredset
+ <baseset [2]>,
+ <not
+ <baseset [2]>>>
+
+issue4289 - ordering of built-ins
+ $ hg log -M -q -r 3:2
+ 3:8528aa5637f2
+ 2:5ed5505e9f1c
+
+test revsets starting with a 40-char hash (issue3669)
+
+ $ ISSUE3669_TIP=`hg tip --template '{node}'`
+ $ hg log -r "${ISSUE3669_TIP}" --template '{rev}\n'
+ 9
+ $ hg log -r "${ISSUE3669_TIP}^" --template '{rev}\n'
+ 8
+
+test or-ed indirect predicates (issue3775)
+
+ $ log '6 or 6^1' | sort
+ 5
+ 6
+ $ log '6^1 or 6' | sort
+ 5
+ 6
+ $ log '4 or 4~1' | sort
+ 2
+ 4
+ $ log '4~1 or 4' | sort
+ 2
+ 4
+ $ log '(0 or 2):(4 or 6) or 0 or 6' | sort
+ 0
+ 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ $ log '0 or 6 or (0 or 2):(4 or 6)' | sort
+ 0
+ 1
+ 2
+ 3
+ 4
+ 5
+ 6
+
+tests for 'remote()' predicate:
+#.  (csets in remote) (id)            (remote)
+1.  less than local   current branch  "default"
+2.  same with local   specified       "default"
+3.  more than local   specified       specified
+
+ $ hg clone --quiet -U . ../remote3
+ $ cd ../remote3
+ $ hg update -q 7
+ $ echo r > r
+ $ hg ci -Aqm 10
+ $ log 'remote()'
+ 7
+ $ log 'remote("a-b-c-")'
+ 2
+ $ cd ../repo
+ $ log 'remote(".a.b.c.", "../remote3")'
+
+tests for concatenation of strings/symbols by "##"
+
+ $ try "278 ## '5f5' ## 1ee ## 'ce5'"
+ (_concat
+ (_concat
+ (_concat
+ (symbol '278')
+ (string '5f5'))
+ (symbol '1ee'))
+ (string 'ce5'))
+ * concatenated:
+ (string '2785f51eece5')
+ * set:
+ <baseset [0]>
+ 0
+
+ $ echo 'cat4($1, $2, $3, $4) = $1 ## $2 ## $3 ## $4' >> .hg/hgrc
+ $ try "cat4(278, '5f5', 1ee, 'ce5')"
+ (func
+ (symbol 'cat4')
+ (list
+ (symbol '278')
+ (string '5f5')
+ (symbol '1ee')
+ (string 'ce5')))
+ * expanded:
+ (_concat
+ (_concat
+ (_concat
+ (symbol '278')
+ (string '5f5'))
+ (symbol '1ee'))
+ (string 'ce5'))
+ * concatenated:
+ (string '2785f51eece5')
+ * set:
+ <baseset [0]>
+ 0
+
+(check concatenation in alias nesting)
+
+ $ echo 'cat2($1, $2) = $1 ## $2' >> .hg/hgrc
+ $ echo 'cat2x2($1, $2, $3, $4) = cat2($1 ## $2, $3 ## $4)' >> .hg/hgrc
+ $ log "cat2x2(278, '5f5', 1ee, 'ce5')"
+ 0
+
+(check operator priority)
+
+ $ echo 'cat2n2($1, $2, $3, $4) = $1 ## $2 or $3 ## $4~2' >> .hg/hgrc
+ $ log "cat2n2(2785f5, 1eece5, 24286f, 4ae135)"
+ 0
+ 4
+
+ $ cd ..
+
+prepare repository that has "default" branches of multiple roots
+
+ $ hg init namedbranch
+ $ cd namedbranch
+
+ $ echo default0 >> a
+ $ hg ci -Aqm0
+ $ echo default1 >> a
+ $ hg ci -m1
+
+ $ hg branch -q stable
+ $ echo stable2 >> a
+ $ hg ci -m2
+ $ echo stable3 >> a
+ $ hg ci -m3
+
+ $ hg update -q null
+ $ echo default4 >> a
+ $ hg ci -Aqm4
+ $ echo default5 >> a
+ $ hg ci -m5
+
+"null" revision belongs to "default" branch (issue4683)
+
+ $ log 'branch(null)'
+ 0
+ 1
+ 4
+ 5
+
+"null" revision belongs to "default" branch, but it shouldn't appear in set
+unless explicitly specified (issue4682)
+
+ $ log 'children(branch(default))'
+ 1
+ 2
+ 5
+
+ $ cd ..
+
+test author/desc/keyword in problematic encoding
+# unicode: cp932:
+# u30A2 0x83 0x41(= 'A')
+# u30C2 0x83 0x61(= 'a')
+
+ $ hg init problematicencoding
+ $ cd problematicencoding
+
+ $ $PYTHON > setup.sh <<EOF
+ > print u'''
+ > echo a > text
+ > hg add text
+ > hg --encoding utf-8 commit -u '\u30A2' -m none
+ > echo b > text
+ > hg --encoding utf-8 commit -u '\u30C2' -m none
+ > echo c > text
+ > hg --encoding utf-8 commit -u none -m '\u30A2'
+ > echo d > text
+ > hg --encoding utf-8 commit -u none -m '\u30C2'
+ > '''.encode('utf-8')
+ > EOF
+ $ sh < setup.sh
+
+test in problematic encoding
+ $ $PYTHON > test.sh <<EOF
+ > print u'''
+ > hg --encoding cp932 log --template '{rev}\\n' -r 'author(\u30A2)'
+ > echo ====
+ > hg --encoding cp932 log --template '{rev}\\n' -r 'author(\u30C2)'
+ > echo ====
+ > hg --encoding cp932 log --template '{rev}\\n' -r 'desc(\u30A2)'
+ > echo ====
+ > hg --encoding cp932 log --template '{rev}\\n' -r 'desc(\u30C2)'
+ > echo ====
+ > hg --encoding cp932 log --template '{rev}\\n' -r 'keyword(\u30A2)'
+ > echo ====
+ > hg --encoding cp932 log --template '{rev}\\n' -r 'keyword(\u30C2)'
+ > '''.encode('cp932')
+ > EOF
+ $ sh < test.sh
+ 0
+ ====
+ 1
+ ====
+ 2
+ ====
+ 3
+ ====
+ 0
+ 2
+ ====
+ 1
+ 3
+
+test error message of bad revset
+ $ hg log -r 'foo\\'
+ hg: parse error at 3: syntax error in revset 'foo\\'
+ [255]
+
+ $ cd ..
+
+Test that a revset predicate from an extension isn't loaded when the
+extension fails to load
+
+ $ cd repo
+
+ $ cat <<EOF > $TESTTMP/custompredicate.py
+ > from mercurial import error, registrar, revset
+ >
+ > revsetpredicate = registrar.revsetpredicate()
+ >
+ > @revsetpredicate('custom1()')
+ > def custom1(repo, subset, x):
+ > return revset.baseset([1])
+ >
+ > raise error.Abort('intentional failure of loading extension')
+ > EOF
+ $ cat <<EOF > .hg/hgrc
+ > [extensions]
+ > custompredicate = $TESTTMP/custompredicate.py
+ > EOF
+
+ $ hg debugrevspec "custom1()"
+ *** failed to import extension custompredicate from $TESTTMP/custompredicate.py: intentional failure of loading extension
+ hg: parse error: unknown identifier: custom1
+ [255]
+
+Test repo.anyrevs with customized revset overrides
+
+ $ cat > $TESTTMP/printprevset.py <<EOF
+ > from mercurial import encoding, registrar
+ > cmdtable = {}
+ > command = registrar.command(cmdtable)
+ > @command('printprevset')
+ > def printprevset(ui, repo):
+ > alias = {}
+ > p = encoding.environ.get('P')
+ > if p:
+ > alias['P'] = p
+ > revs = repo.anyrevs(['P'], user=True, localalias=alias)
+ > ui.write('P=%r\n' % list(revs))
+ > EOF
+
+ $ cat >> .hg/hgrc <<EOF
+ > custompredicate = !
+ > printprevset = $TESTTMP/printprevset.py
+ > EOF
+
+ $ hg --config revsetalias.P=1 printprevset
+ P=[1]
+ $ P=3 hg --config revsetalias.P=2 printprevset
+ P=[3]
+
+ $ cd ..
+
+Test obsstore related revsets
+
+ $ hg init repo1
+ $ cd repo1
+ $ cat <<EOF >> .hg/hgrc
+ > [experimental]
+ > stabilization = createmarkers
+ > EOF
+
+ $ hg debugdrawdag <<'EOS'
+ > F G
+ > |/ # split: B -> E, F
+ > B C D E # amend: B -> C -> D
+ > \|/ | # amend: F -> G
+ > A A Z # amend: A -> Z
+ > EOS
+
+ $ hg log -r 'successors(Z)' -T '{desc}\n'
+ Z
+
+ $ hg log -r 'successors(F)' -T '{desc}\n'
+ F
+ G
+
+ $ hg tag --remove --local C D E F G
+
+ $ hg log -r 'successors(B)' -T '{desc}\n'
+ B
+ D
+ E
+ G
+
+ $ hg log -r 'successors(B)' -T '{desc}\n' --hidden
+ B
+ C
+ D
+ E
+ F
+ G
+
+ $ hg log -r 'successors(B)-obsolete()' -T '{desc}\n' --hidden
+ D
+ E
+ G
+
+ $ hg log -r 'successors(B+A)-contentdivergent()' -T '{desc}\n'
+ A
+ Z
+ B
+
+ $ hg log -r 'successors(B+A)-contentdivergent()-obsolete()' -T '{desc}\n'
+ Z
+
+Test `draft() & ::x` optimization
+
+ $ hg init $TESTTMP/repo2
+ $ cd $TESTTMP/repo2
+ $ hg debugdrawdag <<'EOS'
+ > P5 S1
+ > | |
+ > S2 | D3
+ > \|/
+ > P4
+ > |
+ > P3 D2
+ > | |
+ > P2 D1
+ > |/
+ > P1
+ > |
+ > P0
+ > EOS
+ $ hg phase --public -r P5
+ $ hg phase --force --secret -r S1+S2
+ $ hg log -G -T '{rev} {desc} {phase}' -r 'sort(all(), topo, topo.firstbranch=P5)'
+ o 8 P5 public
+ |
+ | o 10 S1 secret
+ | |
+ | o 7 D3 draft
+ |/
+ | o 9 S2 secret
+ |/
+ o 6 P4 public
+ |
+ o 5 P3 public
+ |
+ o 3 P2 public
+ |
+ | o 4 D2 draft
+ | |
+ | o 2 D1 draft
+ |/
+ o 1 P1 public
+ |
+ o 0 P0 public
+
+ $ hg debugrevspec --verify -p analyzed -p optimized 'draft() & ::(((S1+D1+P5)-D3)+S2)'
+ * analyzed:
+ (and
+ (func
+ (symbol 'draft')
+ None)
+ (func
+ (symbol 'ancestors')
+ (or
+ (list
+ (and
+ (or
+ (list
+ (symbol 'S1')
+ (symbol 'D1')
+ (symbol 'P5')))
+ (not
+ (symbol 'D3')))
+ (symbol 'S2')))))
+ * optimized:
+ (func
+ (symbol '_phaseandancestors')
+ (list
+ (symbol 'draft')
+ (or
+ (list
+ (difference
+ (func
+ (symbol '_list')
+ (string 'S1\x00D1\x00P5'))
+ (symbol 'D3'))
+ (symbol 'S2')))))
+ $ hg debugrevspec --verify -p analyzed -p optimized 'secret() & ::9'
+ * analyzed:
+ (and
+ (func
+ (symbol 'secret')
+ None)
+ (func
+ (symbol 'ancestors')
+ (symbol '9')))
+ * optimized:
+ (func
+ (symbol '_phaseandancestors')
+ (list
+ (symbol 'secret')
+ (symbol '9')))
+ $ hg debugrevspec --verify -p analyzed -p optimized '7 & ( (not public()) & ::(tag()) )'
+ * analyzed:
+ (and
+ (symbol '7')
+ (and
+ (not
+ (func
+ (symbol 'public')
+ None))
+ (func
+ (symbol 'ancestors')
+ (func
+ (symbol 'tag')
+ None))))
+ * optimized:
+ (and
+ (symbol '7')
+ (func
+ (symbol '_phaseandancestors')
+ (list
+ (symbol '_notpublic')
+ (func
+ (symbol 'tag')
+ None))))
+ $ hg debugrevspec --verify -p optimized '(not public()) & ancestors(S1+D2+P5, 1)'
+ * optimized:
+ (and
+ (func
+ (symbol '_notpublic')
+ None)
+ (func
+ (symbol 'ancestors')
+ (list
+ (func
+ (symbol '_list')
+ (string 'S1\x00D2\x00P5'))
+ (symbol '1'))))
+ $ hg debugrevspec --verify -p optimized '(not public()) & ancestors(S1+D2+P5, depth=1)'
+ * optimized:
+ (and
+ (func
+ (symbol '_notpublic')
+ None)
+ (func
+ (symbol 'ancestors')
+ (list
+ (func
+ (symbol '_list')
+ (string 'S1\x00D2\x00P5'))
+ (keyvalue
+ (symbol 'depth')
+ (symbol '1')))))
--- a/tests/test-run-tests.py Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-run-tests.py Sat Sep 30 07:52:48 2017 -0700
@@ -39,7 +39,8 @@
and output.endswith(b'\n')), 'missing newline'
assert not re.search(br'[^ \w\\/\r\n()*?]', expected + output), \
b'single backslash or unknown char'
- match = run_tests.TTest.linematch(expected, output)
+ test = run_tests.TTest(b'test-run-test.t', b'.', b'.')
+ match = test.linematch(expected, output)
if isinstance(match, str):
return 'special: ' + match
elif isinstance(match, bytes):
--- a/tests/test-run-tests.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-run-tests.t Sat Sep 30 07:52:48 2017 -0700
@@ -131,10 +131,9 @@
\x1b[38;5;34m+ bar*baz (glob)\x1b[39m (esc)
bar*bad (glob)
\x1b[38;5;124m- bar*baz (glob)\x1b[39m (esc)
-
- ERROR: test-failure.t output changed
+ \x1b[38;5;88mERROR: \x1b[39m\x1b[38;5;9mtest-failure.t\x1b[39m\x1b[38;5;88m output changed\x1b[39m (esc)
!
- Failed test-failure.t: output changed
+ \x1b[38;5;88mFailed \x1b[39m\x1b[38;5;9mtest-failure.t\x1b[39m\x1b[38;5;88m: output changed\x1b[39m (esc)
# Ran 1 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -158,6 +157,73 @@
python hash seed: * (glob)
#endif
+ $ cat > test-failure.t << EOF
+ > $ true
+ > should go away (true !)
+ > $ true
+ > should stay (false !)
+ >
+ > Should remove first line, not second or third
+ > $ echo 'testing'
+ > baz*foo (glob) (true !)
+ > foobar*foo (glob) (false !)
+ > te*ting (glob) (true !)
+ >
+ > Should keep first two lines, remove third and last
+ > $ echo 'testing'
+ > test.ng (re) (true !)
+ > foo.ar (re) (false !)
+ > b.r (re) (true !)
+ > missing (?)
+ > awol (true !)
+ >
+ > The "missing" line should stay, even though awol is dropped
+ > $ echo 'testing'
+ > test.ng (re) (true !)
+ > foo.ar (?)
+ > awol
+ > missing (?)
+ > EOF
+ $ rt test-failure.t
+
+ --- $TESTTMP/test-failure.t
+ +++ $TESTTMP/test-failure.t.err
+ @@ -1,11 +1,9 @@
+ $ true
+ - should go away (true !)
+ $ true
+ should stay (false !)
+
+ Should remove first line, not second or third
+ $ echo 'testing'
+ - baz*foo (glob) (true !)
+ foobar*foo (glob) (false !)
+ te*ting (glob) (true !)
+
+ foo.ar (re) (false !)
+ missing (?)
+ @@ -13,13 +11,10 @@
+ $ echo 'testing'
+ test.ng (re) (true !)
+ foo.ar (re) (false !)
+ - b.r (re) (true !)
+ missing (?)
+ - awol (true !)
+
+ The "missing" line should stay, even though awol is dropped
+ $ echo 'testing'
+ test.ng (re) (true !)
+ foo.ar (?)
+ - awol
+ missing (?)
+
+ ERROR: test-failure.t output changed
+ !
+ Failed test-failure.t: output changed
+ # Ran 1 tests, 0 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
+
basic failing test
$ cat > test-failure.t << EOF
> $ echo babar
@@ -757,6 +823,20 @@
2
#endif
+ $ cat >> test-cases.t <<'EOF'
+ > #if a
+ > $ NAME=A
+ > #else
+ > $ NAME=B
+ > #endif
+ > $ echo $NAME
+ > A (a !)
+ > B (b !)
+ > EOF
+ $ rt test-cases.t
+ ..
+ # Ran 2 tests, 0 skipped, 0 failed.
+
$ rm test-cases.t
(reinstall)
@@ -901,6 +981,19 @@
python hash seed: * (glob)
[1]
+Ensure that --test-list causes only the tests listed in that file to
+be executed.
+ $ echo test-success.t >> onlytest
+ $ rt --test-list=onlytest
+ .
+ # Ran 1 tests, 0 skipped, 0 failed.
+ $ echo test-bogus.t >> anothertest
+ $ rt --test-list=onlytest --test-list=anothertest
+ s.
+ Skipped test-bogus.t: Doesn't exist
+ # Ran 1 tests, 1 skipped, 0 failed.
+ $ rm onlytest anothertest
+
test for --json
==================
@@ -1205,6 +1298,58 @@
$ cd ..
+support bisecting a separate repo
+
+ $ hg init bisect-dependent
+ $ cd bisect-dependent
+ $ cat > test-bisect-dependent.t <<EOF
+ > $ tail -1 \$TESTDIR/../bisect/test-bisect.t
+ > pass
+ > EOF
+ $ hg commit -Am dependent test-bisect-dependent.t
+
+ $ rt --known-good-rev=0 test-bisect-dependent.t
+
+ --- $TESTTMP/anothertests/bisect-dependent/test-bisect-dependent.t
+ +++ $TESTTMP/anothertests/bisect-dependent/test-bisect-dependent.t.err
+ @@ -1,2 +1,2 @@
+ $ tail -1 $TESTDIR/../bisect/test-bisect.t
+ - pass
+ + fail
+
+ ERROR: test-bisect-dependent.t output changed
+ !
+ Failed test-bisect-dependent.t: output changed
+ Failed to identify failure point for test-bisect-dependent.t
+ # Ran 1 tests, 0 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
+
+ $ rt --bisect-repo=../test-bisect test-bisect-dependent.t
+ Usage: run-tests.py [options] [tests]
+
+ run-tests.py: error: --bisect-repo cannot be used without --known-good-rev
+ [2]
+
+ $ rt --known-good-rev=0 --bisect-repo=../bisect test-bisect-dependent.t
+
+ --- $TESTTMP/anothertests/bisect-dependent/test-bisect-dependent.t
+ +++ $TESTTMP/anothertests/bisect-dependent/test-bisect-dependent.t.err
+ @@ -1,2 +1,2 @@
+ $ tail -1 $TESTDIR/../bisect/test-bisect.t
+ - pass
+ + fail
+
+ ERROR: test-bisect-dependent.t output changed
+ !
+ Failed test-bisect-dependent.t: output changed
+ test-bisect-dependent.t broken by 72cbf122d116 (bad)
+ # Ran 1 tests, 0 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
+
+ $ cd ..
+
Test a broken #if statement doesn't break run-tests threading.
==============================================================
$ mkdir broken
--- a/tests/test-setdiscovery.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-setdiscovery.t Sat Sep 30 07:52:48 2017 -0700
@@ -349,7 +349,7 @@
$ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
"GET /?cmd=capabilities HTTP/1.1" 200 -
"GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
- "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
+ "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
"GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
$ cat errors.log
--- a/tests/test-share.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-share.t Sat Sep 30 07:52:48 2017 -0700
@@ -326,7 +326,12 @@
$ cat > failpullbookmarks.py << EOF
> """A small extension that makes bookmark pulls fail, for testing"""
- > from mercurial import extensions, exchange, error
+ > from __future__ import absolute_import
+ > from mercurial import (
+ > error,
+ > exchange,
+ > extensions,
+ > )
> def _pullbookmarks(orig, pullop):
> orig(pullop)
> raise error.HookAbort('forced failure by extension')
--- a/tests/test-shelve.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-shelve.t Sat Sep 30 07:52:48 2017 -0700
@@ -342,6 +342,23 @@
warning: conflicts while merging a/a! (edit, then use 'hg resolve --mark')
unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
[1]
+ $ hg status -v
+ M a/a
+ M b.rename/b
+ M c.copy
+ R b/b
+ ? a/a.orig
+ # The repository is in an unfinished *unshelve* state.
+
+ # Unresolved merge conflicts:
+ #
+ # a/a (glob)
+ #
+ # To mark files as resolved: hg resolve --mark FILE
+
+ # To continue: hg unshelve --continue
+ # To abort: hg unshelve --abort
+
ensure that we have a merge with unresolved conflicts
@@ -679,7 +696,7 @@
$ cat >> $HGRCPATH << EOF
> [experimental]
- > evolution=createmarkers
+ > stabilization=createmarkers
> EOF
$ hg shelve
shelved as default
@@ -1100,8 +1117,8 @@
shelved as default
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg debugbundle .hg/shelved/*.hg
- Stream params: sortdict([('Compression', 'BZ')])
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 1, version: 02}
45993d65fe9dc3c6d8764b9c3b07fa831ee7d92d
$ cd ..
@@ -1243,7 +1260,7 @@
unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
[1]
$ ls .hg/origbackups
- root.orig
+ root
$ rm -rf .hg/origbackups
test Abort unshelve always gets user out of the unshelved state
--- a/tests/test-show-stack.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-show-stack.t Sat Sep 30 07:52:48 2017 -0700
@@ -17,7 +17,7 @@
$ echo 0 > foo
$ hg -q commit -A -m 'commit 0'
$ hg show stack
- @ 9f171 commit 0
+ @ 9f17 commit 0
Stack displays multiple draft changesets
@@ -30,48 +30,48 @@
$ echo 4 > foo
$ hg commit -m 'commit 4'
$ hg show stack
- @ 2737b commit 4
- o d1a69 commit 3
- o 128c8 commit 2
- o 181cc commit 1
- o 9f171 commit 0
+ @ 2737 commit 4
+ o d1a6 commit 3
+ o 128c commit 2
+ o 181c commit 1
+ o 9f17 commit 0
Public parent of draft base is displayed, separated from stack
$ hg phase --public -r 0
$ hg show stack
- @ 2737b commit 4
- o d1a69 commit 3
- o 128c8 commit 2
- o 181cc commit 1
+ @ 2737 commit 4
+ o d1a6 commit 3
+ o 128c commit 2
+ o 181c commit 1
/ (stack base)
- o 9f171 commit 0
+ o 9f17 commit 0
$ hg phase --public -r 1
$ hg show stack
- @ 2737b commit 4
- o d1a69 commit 3
- o 128c8 commit 2
+ @ 2737 commit 4
+ o d1a6 commit 3
+ o 128c commit 2
/ (stack base)
- o 181cc commit 1
+ o 181c commit 1
Draft descendants are shown
$ hg -q up 2
$ hg show stack
- o 2737b commit 4
- o d1a69 commit 3
- @ 128c8 commit 2
+ o 2737 commit 4
+ o d1a6 commit 3
+ @ 128c commit 2
/ (stack base)
- o 181cc commit 1
+ o 181c commit 1
$ hg -q up 3
$ hg show stack
- o 2737b commit 4
- @ d1a69 commit 3
- o 128c8 commit 2
+ o 2737 commit 4
+ @ d1a6 commit 3
+ o 128c commit 2
/ (stack base)
- o 181cc commit 1
+ o 181c commit 1
working dir on public changeset should display special message
@@ -89,10 +89,10 @@
$ hg show stack
\ / (multiple children)
|
- o d1a69 commit 3
- @ 128c8 commit 2
+ o d1a6 commit 3
+ @ 128c commit 2
/ (stack base)
- o 181cc commit 1
+ o 181c commit 1
$ cd ..
@@ -117,9 +117,9 @@
TODO doesn't yet handle case where wdir is a draft merge
$ hg show stack
- @ 8ee90 merge heads
+ @ 8ee9 merge heads
/ (stack base)
- o 59478 head 1
+ o 5947 head 1
$ echo d1 > foo
$ hg commit -m 'draft 1'
@@ -127,10 +127,10 @@
$ hg commit -m 'draft 2'
$ hg show stack
- @ 430d5 draft 2
- o 787b1 draft 1
+ @ 430d draft 2
+ o 787b draft 1
/ (stack base)
- o 8ee90 merge heads
+ o 8ee9 merge heads
$ cd ..
@@ -156,36 +156,36 @@
Newer draft heads don't impact output
$ hg show stack
- @ eaffc draft 2
- o 2b218 draft 1
+ @ eaff draft 2
+ o 2b21 draft 1
/ (stack base)
- o b66bb base
+ o b66b base
Newer public heads are rendered
$ hg phase --public -r '::tip'
$ hg show stack
- o baa4b new 2
+ o baa4 new 2
/ (2 commits ahead)
:
: (stack head)
- : @ eaffc draft 2
- : o 2b218 draft 1
+ : @ eaff draft 2
+ : o 2b21 draft 1
:/ (stack base)
- o b66bb base
+ o b66b base
If rebase is available, we show a hint how to rebase to that head
$ hg --config extensions.rebase= show stack
- o baa4b new 2
- / (2 commits ahead; hg rebase --source 2b218 --dest baa4b)
+ o baa4 new 2
+ / (2 commits ahead; hg rebase --source 2b21 --dest baa4)
:
: (stack head)
- : @ eaffc draft 2
- : o 2b218 draft 1
+ : @ eaff draft 2
+ : o 2b21 draft 1
:/ (stack base)
- o b66bb base
+ o b66b base
Similar tests but for multiple heads
@@ -196,25 +196,25 @@
$ hg -q up 2
$ hg show stack
- o baa4b new 2
+ o baa4 new 2
/ (2 commits ahead)
- : o 9a848 new head 2
+ : o 9a84 new head 2
:/ (1 commits ahead)
:
: (stack head)
- : @ eaffc draft 2
- : o 2b218 draft 1
+ : @ eaff draft 2
+ : o 2b21 draft 1
:/ (stack base)
- o b66bb base
+ o b66b base
$ hg --config extensions.rebase= show stack
- o baa4b new 2
- / (2 commits ahead; hg rebase --source 2b218 --dest baa4b)
- : o 9a848 new head 2
- :/ (1 commits ahead; hg rebase --source 2b218 --dest 9a848)
+ o baa4 new 2
+ / (2 commits ahead; hg rebase --source 2b21 --dest baa4)
+ : o 9a84 new head 2
+ :/ (1 commits ahead; hg rebase --source 2b21 --dest 9a84)
:
: (stack head)
- : @ eaffc draft 2
- : o 2b218 draft 1
+ : @ eaff draft 2
+ : o 2b21 draft 1
:/ (stack base)
- o b66bb base
+ o b66b base
--- a/tests/test-show-work.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-show-work.t Sat Sep 30 07:52:48 2017 -0700
@@ -16,20 +16,20 @@
$ hg -q commit -A -m 'commit 0'
$ hg show work
- @ 9f171 commit 0
+ @ 9f17 commit 0
Even when it isn't the wdir
$ hg -q up null
$ hg show work
- o 9f171 commit 0
+ o 9f17 commit 0
Single changeset is still there when public because it is a head
$ hg phase --public -r 0
$ hg show work
- o 9f171 commit 0
+ o 9f17 commit 0
A draft child will show both it and public parent
@@ -38,8 +38,8 @@
$ hg commit -m 'commit 1'
$ hg show work
- @ 181cc commit 1
- o 9f171 commit 0
+ @ 181c commit 1
+ o 9f17 commit 0
Multiple draft children will be shown
@@ -47,16 +47,16 @@
$ hg commit -m 'commit 2'
$ hg show work
- @ 128c8 commit 2
- o 181cc commit 1
- o 9f171 commit 0
+ @ 128c commit 2
+ o 181c commit 1
+ o 9f17 commit 0
Bumping first draft changeset to public will hide its parent
$ hg phase --public -r 1
$ hg show work
- @ 128c8 commit 2
- o 181cc commit 1
+ @ 128c commit 2
+ o 181c commit 1
|
~
@@ -68,10 +68,10 @@
created new head
$ hg show work
- @ f0abc commit 3
- | o 128c8 commit 2
+ @ f0ab commit 3
+ | o 128c commit 2
|/
- o 181cc commit 1
+ o 181c commit 1
|
~
@@ -80,10 +80,10 @@
$ hg -q up null
$ hg show work
- o f0abc commit 3
- | o 128c8 commit 2
+ o f0ab commit 3
+ | o 128c commit 2
|/
- o 181cc commit 1
+ o 181c commit 1
|
~
@@ -95,13 +95,13 @@
created new head
$ hg show work
- @ 668ca commit 4
- | o f0abc commit 3
- | | o 128c8 commit 2
+ @ 668c commit 4
+ | o f0ab commit 3
+ | | o 128c commit 2
| |/
- | o 181cc commit 1
+ | o 181c commit 1
|/
- o 9f171 commit 0
+ o 9f17 commit 0
$ cd ..
@@ -126,11 +126,11 @@
$ hg commit -m 'commit 4'
$ hg show work
- @ f8dd3 (mybranch) commit 4
- o 90cfc (mybranch) commit 3
- | o 128c8 commit 2
+ @ f8dd (mybranch) commit 4
+ o 90cf (mybranch) commit 3
+ | o 128c commit 2
|/
- o 181cc commit 1
+ o 181c commit 1
|
~
@@ -157,11 +157,11 @@
$ hg bookmark mybook
$ hg show work
- @ cac82 (mybook) commit 4
- o f0abc commit 3
- | o 128c8 (@) commit 2
+ @ cac8 (mybook) commit 4
+ o f0ab commit 3
+ | o 128c (@) commit 2
|/
- o 181cc commit 1
+ o 181c commit 1
|
~
@@ -182,9 +182,9 @@
$ hg tag 0.2
$ hg show work
- @ 37582 Added tag 0.2 for changeset 6379c25b76f1
- o 6379c (0.2) commit 3
- o a2ad9 Added tag 0.1 for changeset 6a75536ea0b1
+ @ 3758 Added tag 0.2 for changeset 6379c25b76f1
+ o 6379 (0.2) commit 3
+ o a2ad Added tag 0.1 for changeset 6a75536ea0b1
|
~
@@ -205,15 +205,15 @@
$ hg commit -m 'commit 2'
$ hg show work
- @ 34834 (mybook) (mybranch) commit 2
- o 97fcc commit 1
+ @ 3483 (mybook) (mybranch) commit 2
+ o 97fc commit 1
Multiple bookmarks on same changeset render properly
$ hg book mybook2
$ hg show work
- @ 34834 (mybook mybook2) (mybranch) commit 2
- o 97fcc commit 1
+ @ 3483 (mybook mybook2) (mybranch) commit 2
+ o 97fc commit 1
$ cd ..
@@ -230,8 +230,38 @@
$ hg commit -m 'commit 3'
$ hg --config extensions.revnames=$TESTDIR/revnamesext.py show work
- @ 32f3e (r2) commit 3
- o 6a755 (r1) commit 2
- o 97fcc (r0) commit 1
+ @ 32f3 (r2) commit 3
+ o 6a75 (r1) commit 2
+ o 97fc (r0) commit 1
$ cd ..
+
+Prefix collision on hashes increases shortest node length
+
+ $ hg init hashcollision
+ $ cd hashcollision
+ $ echo 0 > a
+ $ hg -q commit -Am 0
+ $ for i in 17 1057 2857 4025; do
+ > hg -q up 0
+ > echo $i > a
+ > hg -q commit -m $i
+ > echo 0 > a
+ > hg commit -m "$i commit 2"
+ > done
+
+ $ hg show work
+ @ cfd04 4025 commit 2
+ o c562d 4025
+ | o 08048 2857 commit 2
+ | o c5623 2857
+ |/
+ | o 6a6b6 1057 commit 2
+ | o c5625 1057
+ |/
+ | o 96b4e 17 commit 2
+ | o 11424 17
+ |/
+ o b4e73 0
+
+ $ cd ..
--- a/tests/test-show.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-show.t Sat Sep 30 07:52:48 2017 -0700
@@ -95,8 +95,8 @@
$ hg bookmark a-longer-bookmark
$ hg show bookmarks
- * a-longer-bookmark 7b570
- book1 b757f
+ * a-longer-bookmark 7b57
+ book1 b757
A custom bookmarks template works
@@ -112,13 +112,15 @@
"active": true,
"bookmark": "a-longer-bookmark",
"longestbookmarklen": 17,
- "node": "7b5709ab64cbc34da9b4367b64afff47f2c4ee83"
+ "node": "7b5709ab64cbc34da9b4367b64afff47f2c4ee83",
+ "nodelen": 4
},
{
"active": false,
"bookmark": "book1",
"longestbookmarklen": 17,
- "node": "b757f780b8ffd71267c6ccb32e0882d9d32a8cc0"
+ "node": "b757f780b8ffd71267c6ccb32e0882d9d32a8cc0",
+ "nodelen": 4
}
]
@@ -136,19 +138,19 @@
(no bookmarks set)
$ hg --config commands.show.aliasprefix=sh shwork
- @ 7b570 commit for book2
- o b757f commit for book1
- o ba592 initial
+ @ 7b57 commit for book2
+ o b757 commit for book1
+ o ba59 initial
$ hg --config commands.show.aliasprefix='s sh' swork
- @ 7b570 commit for book2
- o b757f commit for book1
- o ba592 initial
+ @ 7b57 commit for book2
+ o b757 commit for book1
+ o ba59 initial
$ hg --config commands.show.aliasprefix='s sh' shwork
- @ 7b570 commit for book2
- o b757f commit for book1
- o ba592 initial
+ @ 7b57 commit for book2
+ o b757 commit for book1
+ o ba59 initial
The aliases don't appear in `hg config`
--- a/tests/test-sparse.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-sparse.t Sat Sep 30 07:52:48 2017 -0700
@@ -29,23 +29,50 @@
#if no-windows
$ hg debugsparse --include /foo/bar
- warning: paths cannot start with /, ignoring: ['/foo/bar']
+ abort: paths cannot be absolute
+ [255]
$ hg debugsparse --include '$TESTTMP/myrepo/hide'
$ hg debugsparse --include '/root'
- warning: paths cannot start with /, ignoring: ['/root']
+ abort: paths cannot be absolute
+ [255]
#else
TODO: See if this can be made to fail the same way as on Unix
$ hg debugsparse --include /c/foo/bar
- abort: c:/foo/bar not under root '$TESTTMP/myrepo' (glob)
+ abort: paths cannot be absolute
[255]
$ hg debugsparse --include '$TESTTMP/myrepo/hide'
$ hg debugsparse --include '/c/root'
- abort: c:/root not under root '$TESTTMP/myrepo' (glob)
+ abort: paths cannot be absolute
[255]
#endif
+Paths should be treated as cwd-relative, not repo-root-relative
+ $ mkdir subdir && cd subdir
+ $ hg debugsparse --include path
+ $ hg debugsparse
+ [include]
+ $TESTTMP/myrepo/hide
+ hide
+ subdir/path
+
+ $ cd ..
+ $ echo hello > subdir/file2.ext
+ $ cd subdir
+ $ hg debugsparse --include '**.ext' # let us test globs
+ $ hg debugsparse --include 'path:abspath' # and a path: pattern
+ $ cd ..
+ $ hg debugsparse
+ [include]
+ $TESTTMP/myrepo/hide
+ hide
+ path:abspath
+ subdir/**.ext
+ subdir/path
+
+ $ rm -rf subdir
+
Verify commiting while sparse includes other files
$ echo z > hide
--- a/tests/test-ssh-bundle1.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-ssh-bundle1.t Sat Sep 30 07:52:48 2017 -0700
@@ -464,8 +464,8 @@
running .* ".*/dummyssh" ['"]user@dummy['"] ('|")hg -R remote serve --stdio('|") (re)
sending hello command
sending between command
- remote: 355
- remote: capabilities: lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN
+ remote: 372
+ remote: capabilities: lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Aphases%3Dheads%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN
remote: 1
preparing listkeys for "bookmarks"
sending listkeys command
--- a/tests/test-ssh.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-ssh.t Sat Sep 30 07:52:48 2017 -0700
@@ -480,8 +480,8 @@
running .* ".*/dummyssh" ['"]user@dummy['"] ('|")hg -R remote serve --stdio('|") (re)
sending hello command
sending between command
- remote: 355
- remote: capabilities: lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN
+ remote: 372
+ remote: capabilities: lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Aphases%3Dheads%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN
remote: 1
query 1; heads
sending batch command
@@ -491,9 +491,9 @@
sending getbundle command
bundle2-input-bundle: with-transaction
bundle2-input-part: "listkeys" (params: 1 mandatory) supported
- bundle2-input-part: total payload size 15
- bundle2-input-part: "listkeys" (params: 1 mandatory) supported
bundle2-input-part: total payload size 45
+ bundle2-input-part: "phase-heads" supported
+ bundle2-input-part: total payload size 72
bundle2-input-bundle: 1 parts total
checking for updated bookmarks
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-status-terse.t Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,185 @@
+ $ mkdir folder
+ $ cd folder
+ $ hg init
+ $ mkdir x x/l x/m x/n x/l/u x/l/u/a
+ $ touch a b x/aa.o x/bb.o
+ $ hg status
+ ? a
+ ? b
+ ? x/aa.o
+ ? x/bb.o
+
+ $ hg status --terse u
+ ? a
+ ? b
+ ? x/
+ $ hg status --terse maudric
+ ? a
+ ? b
+ ? x/
+ $ hg status --terse madric
+ ? a
+ ? b
+ ? x/aa.o
+ ? x/bb.o
+ $ hg status --terse f
+ abort: 'f' not recognized
+ [255]
+
+Add a .hgignore so that we can also have ignored files
+
+ $ echo ".*\.o" > .hgignore
+ $ hg status
+ ? .hgignore
+ ? a
+ ? b
+ $ hg status -i
+ I x/aa.o
+ I x/bb.o
+
+Tersing ignored files
+ $ hg status -t i --ignored
+ I x/
+
+Adding more files
+ $ mkdir y
+ $ touch x/aa x/bb y/l y/m y/l.o y/m.o
+ $ touch x/l/aa x/m/aa x/n/aa x/l/u/bb x/l/u/a/bb
+
+ $ hg status
+ ? .hgignore
+ ? a
+ ? b
+ ? x/aa
+ ? x/bb
+ ? x/l/aa
+ ? x/l/u/a/bb
+ ? x/l/u/bb
+ ? x/m/aa
+ ? x/n/aa
+ ? y/l
+ ? y/m
+
+ $ hg status --terse u
+ ? .hgignore
+ ? a
+ ? b
+ ? x/
+ ? y/
+
+ $ hg add x/aa x/bb .hgignore
+ $ hg status --terse au
+ A .hgignore
+ A x/aa
+ A x/bb
+ ? a
+ ? b
+ ? x/l/
+ ? x/m/
+ ? x/n/
+ ? y/
+
+Including ignored files
+
+ $ hg status --terse aui
+ A .hgignore
+ A x/aa
+ A x/bb
+ ? a
+ ? b
+ ? x/l/
+ ? x/m/
+ ? x/n/
+ ? y/l
+ ? y/m
+ $ hg status --terse au -i
+ I x/aa.o
+ I x/bb.o
+ I y/l.o
+ I y/m.o
+
+Committing some of the files
+
+ $ hg commit x/aa x/bb .hgignore -m "First commit"
+ $ hg status
+ ? a
+ ? b
+ ? x/l/aa
+ ? x/l/u/a/bb
+ ? x/l/u/bb
+ ? x/m/aa
+ ? x/n/aa
+ ? y/l
+ ? y/m
+ $ hg status --terse mardu
+ ? a
+ ? b
+ ? x/l/
+ ? x/m/
+ ? x/n/
+ ? y/
+
+Modifying already committed files
+
+ $ echo "Hello" >> x/aa
+ $ echo "World" >> x/bb
+ $ hg status --terse maurdc
+ M x/aa
+ M x/bb
+ ? a
+ ? b
+ ? x/l/
+ ? x/m/
+ ? x/n/
+ ? y/
+
+Respecting other flags
+
+ $ hg status --terse marduic --all
+ M x/aa
+ M x/bb
+ ? a
+ ? b
+ ? x/l/
+ ? x/m/
+ ? x/n/
+ ? y/l
+ ? y/m
+ I x/aa.o
+ I x/bb.o
+ I y/l.o
+ I y/m.o
+ C .hgignore
+ $ hg status --terse marduic -a
+ $ hg status --terse marduic -c
+ C .hgignore
+ $ hg status --terse marduic -m
+ M x/aa
+ M x/bb
+
+Passing 'i' in terse value will consider the ignored files while tersing
+
+ $ hg status --terse marduic -u
+ ? a
+ ? b
+ ? x/l/
+ ? x/m/
+ ? x/n/
+ ? y/l
+ ? y/m
+
+Omitting 'i' in terse value does not consider ignored files while tersing
+
+ $ hg status --terse marduc -u
+ ? a
+ ? b
+ ? x/l/
+ ? x/m/
+ ? x/n/
+ ? y/
+
+Trying with --rev
+
+ $ hg status --terse marduic --rev 0 --rev 1
+ abort: cannot use --terse with --rev
+ [255]
--- a/tests/test-strip.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-strip.t Sat Sep 30 07:52:48 2017 -0700
@@ -211,10 +211,10 @@
summary: b
$ hg debugbundle .hg/strip-backup/*
- Stream params: sortdict([('Compression', 'BZ')])
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 1, version: 02}
264128213d290d868c54642d13aeaa3675551a78
- phase-heads -- 'sortdict()'
+ phase-heads -- {}
264128213d290d868c54642d13aeaa3675551a78 draft
$ hg pull .hg/strip-backup/*
pulling from .hg/strip-backup/264128213d29-0b39d6bf-backup.hg
@@ -961,7 +961,8 @@
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ echo 3 >> I
$ cat > $TESTTMP/delayedstrip.py <<EOF
- > from mercurial import repair, commands
+ > from __future__ import absolute_import
+ > from mercurial import commands, repair
> def reposetup(ui, repo):
> def getnodes(expr):
> return [repo.changelog.node(r) for r in repo.revs(expr)]
@@ -1063,8 +1064,8 @@
$ cd $TESTTMP/scmutilcleanup.obsstore
$ cat >> .hg/hgrc <<EOF
> [experimental]
- > evolution=all
- > evolution.track-operation=1
+ > stabilization=all
+ > stabilization.track-operation=1
> EOF
$ hg log -r . -T '\n' --config extensions.t=$TESTTMP/scmutilcleanup.py
@@ -1111,11 +1112,11 @@
$ hg ci -Aqm a
$ hg ci --amend -m a2
$ hg debugobsolete
- cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b 489bac576828490c0bb8d45eac9e5e172e4ec0a8 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b 489bac576828490c0bb8d45eac9e5e172e4ec0a8 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
$ hg strip .
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
saved backup bundle to $TESTTMP/issue5678/.hg/strip-backup/489bac576828-bef27e14-backup.hg (glob)
$ hg unbundle -q .hg/strip-backup/*
$ hg debugobsolete
- cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b 489bac576828490c0bb8d45eac9e5e172e4ec0a8 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b 489bac576828490c0bb8d45eac9e5e172e4ec0a8 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
$ cd ..
--- a/tests/test-subrepo-git.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-subrepo-git.t Sat Sep 30 07:52:48 2017 -0700
@@ -885,9 +885,9 @@
$ hg revert --all --verbose --config 'ui.origbackuppath=.hg/origbackups'
reverting subrepo ../gitroot
creating directory: $TESTTMP/tc/.hg/origbackups (glob)
- saving current version of foobar as $TESTTMP/tc/.hg/origbackups/foobar.orig (glob)
+ saving current version of foobar as $TESTTMP/tc/.hg/origbackups/foobar (glob)
$ ls .hg/origbackups
- foobar.orig
+ foobar
$ rm -rf .hg/origbackups
show file at specific revision
--- a/tests/test-subrepo-missing.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-subrepo-missing.t Sat Sep 30 07:52:48 2017 -0700
@@ -76,7 +76,7 @@
> [phases]
> publish=False
> [experimental]
- > evolution=createmarkers
+ > stabilization=createmarkers
> EOF
check that we can update parent repo with missing (amended) subrepo revision
--- a/tests/test-tag.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-tag.t Sat Sep 30 07:52:48 2017 -0700
@@ -411,6 +411,59 @@
abort: cannot tag null revision
[255]
+issue5539: pruned tags do not appear in .hgtags
+
+ $ cat >> $HGRCPATH << EOF
+ > [experimental]
+ > stabilization=createmarkers,exchange
+ > EOF
+ $ hg up e4d483960b9b --quiet
+ $ echo aaa >>a
+ $ hg ci -maaa
+ $ hg log -r . -T "{node}\n"
+ 743b3afd5aa69f130c246806e48ad2e699f490d2
+ $ hg tag issue5539
+ hook: tag changes detected
+ hook: +A 743b3afd5aa69f130c246806e48ad2e699f490d2 issue5539
+ $ cat .hgtags
+ acb14030fe0a21b60322c440ad2d20cf7685a376 foobar
+ a0eea09de1eeec777b46f2085260a373b2fbc293 newline
+ 743b3afd5aa69f130c246806e48ad2e699f490d2 issue5539
+ $ hg log -r . -T "{node}\n"
+ abeb261f0508ecebcd345ce21e7a25112df417aa
+(mimic 'hg prune' command by obsoleting current changeset and then moving to its parent)
+ $ hg debugobsolete abeb261f0508ecebcd345ce21e7a25112df417aa --record-parents
+ obsoleted 1 changesets
+ $ hg up ".^" --quiet
+ $ cat .hgtags
+ acb14030fe0a21b60322c440ad2d20cf7685a376 foobar
+ a0eea09de1eeec777b46f2085260a373b2fbc293 newline
+ $ echo bbb >>a
+ $ hg ci -mbbb
+ $ hg log -r . -T "{node}\n"
+ 089dd20da58cae34165c37b064539c6ac0c6a0dd
+ $ hg tag issue5539
+ hook: tag changes detected
+ hook: -M 743b3afd5aa69f130c246806e48ad2e699f490d2 issue5539
+ hook: +M 089dd20da58cae34165c37b064539c6ac0c6a0dd issue5539
+ $ hg id
+ 0accf560a709 tip
+ $ cat .hgtags
+ acb14030fe0a21b60322c440ad2d20cf7685a376 foobar
+ a0eea09de1eeec777b46f2085260a373b2fbc293 newline
+ 089dd20da58cae34165c37b064539c6ac0c6a0dd issue5539
+ $ hg tags
+ tip 19:0accf560a709
+ issue5539 18:089dd20da58c
+ new-topo-head 13:0f26aaea6f74
+ baz 13:0f26aaea6f74
+ custom-tag 12:75a534207be6
+ tag-and-branch-same-name 11:fc93d2ea1cd7
+ newline 9:a0eea09de1ee
+ localnewline 8:c2899151f4e7
+ localblah 0:acb14030fe0a
+ foobar 0:acb14030fe0a
+
$ cd ..
tagging on an uncommitted merge (issue2542)
--- a/tests/test-tags.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-tags.t Sat Sep 30 07:52:48 2017 -0700
@@ -721,13 +721,13 @@
$ hg -R tagsclient bundle --all ./test-cache-in-bundle-all-rev.hg
4 changesets found
$ hg debugbundle ./test-cache-in-bundle-all-rev.hg
- Stream params: sortdict([('Compression', 'BZ')])
- changegroup -- "sortdict([('version', '02'), ('nbchanges', '4')])"
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 4, version: 02}
96ee1d7354c4ad7372047672c36a1f561e3a6a4c
c4dab0c2fd337eb9191f80c3024830a4889a8f34
f63cc8fe54e4d326f8d692805d70e092f851ddb1
40f0358cb314c824a5929ee527308d90e023bc10
- hgtagsfnodes -- 'sortdict()'
+ hgtagsfnodes -- {}
Check that local clone includes cache data
--- a/tests/test-template-engine.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-template-engine.t Sat Sep 30 07:52:48 2017 -0700
@@ -10,7 +10,7 @@
> def process(self, t, map):
> tmpl = self.loader(t)
> for k, v in map.iteritems():
- > if k in ('templ', 'ctx', 'repo', 'revcache', 'cache'):
+ > if k in ('templ', 'ctx', 'repo', 'revcache', 'cache', 'troubles'):
> continue
> if hasattr(v, '__call__'):
> v = v(**map)
--- a/tests/test-terse-status.t Mon Sep 18 10:54:00 2017 -0700
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,185 +0,0 @@
- $ mkdir folder
- $ cd folder
- $ hg init
- $ mkdir x x/l x/m x/n x/l/u x/l/u/a
- $ touch a b x/aa.o x/bb.o
- $ hg status
- ? a
- ? b
- ? x/aa.o
- ? x/bb.o
-
- $ hg status --terse u
- ? a
- ? b
- ? x/
- $ hg status --terse maudric
- ? a
- ? b
- ? x/
- $ hg status --terse madric
- ? a
- ? b
- ? x/aa.o
- ? x/bb.o
- $ hg status --terse f
- abort: 'f' not recognized
- [255]
-
-Add a .hgignore so that we can also have ignored files
-
- $ echo ".*\.o" > .hgignore
- $ hg status
- ? .hgignore
- ? a
- ? b
- $ hg status -i
- I x/aa.o
- I x/bb.o
-
-Tersing ignored files
- $ hg status -t i --ignored
- I x/
-
-Adding more files
- $ mkdir y
- $ touch x/aa x/bb y/l y/m y/l.o y/m.o
- $ touch x/l/aa x/m/aa x/n/aa x/l/u/bb x/l/u/a/bb
-
- $ hg status
- ? .hgignore
- ? a
- ? b
- ? x/aa
- ? x/bb
- ? x/l/aa
- ? x/l/u/a/bb
- ? x/l/u/bb
- ? x/m/aa
- ? x/n/aa
- ? y/l
- ? y/m
-
- $ hg status --terse u
- ? .hgignore
- ? a
- ? b
- ? x/
- ? y/
-
- $ hg add x/aa x/bb .hgignore
- $ hg status --terse au
- A .hgignore
- A x/aa
- A x/bb
- ? a
- ? b
- ? x/l/
- ? x/m/
- ? x/n/
- ? y/
-
-Including ignored files
-
- $ hg status --terse aui
- A .hgignore
- A x/aa
- A x/bb
- ? a
- ? b
- ? x/l/
- ? x/m/
- ? x/n/
- ? y/l
- ? y/m
- $ hg status --terse au -i
- I x/aa.o
- I x/bb.o
- I y/l.o
- I y/m.o
-
-Committing some of the files
-
- $ hg commit x/aa x/bb .hgignore -m "First commit"
- $ hg status
- ? a
- ? b
- ? x/l/aa
- ? x/l/u/a/bb
- ? x/l/u/bb
- ? x/m/aa
- ? x/n/aa
- ? y/l
- ? y/m
- $ hg status --terse mardu
- ? a
- ? b
- ? x/l/
- ? x/m/
- ? x/n/
- ? y/
-
-Modifying already committed files
-
- $ echo "Hello" >> x/aa
- $ echo "World" >> x/bb
- $ hg status --terse maurdc
- M x/aa
- M x/bb
- ? a
- ? b
- ? x/l/
- ? x/m/
- ? x/n/
- ? y/
-
-Respecting other flags
-
- $ hg status --terse marduic --all
- M x/aa
- M x/bb
- ? a
- ? b
- ? x/l/
- ? x/m/
- ? x/n/
- ? y/l
- ? y/m
- I x/aa.o
- I x/bb.o
- I y/l.o
- I y/m.o
- C .hgignore
- $ hg status --terse marduic -a
- $ hg status --terse marduic -c
- C .hgignore
- $ hg status --terse marduic -m
- M x/aa
- M x/bb
-
-Passing 'i' in terse value will consider the ignored files while tersing
-
- $ hg status --terse marduic -u
- ? a
- ? b
- ? x/l/
- ? x/m/
- ? x/n/
- ? y/l
- ? y/m
-
-Omitting 'i' in terse value does not consider ignored files while tersing
-
- $ hg status --terse marduc -u
- ? a
- ? b
- ? x/l/
- ? x/m/
- ? x/n/
- ? y/
-
-Trying with --rev
-
- $ hg status --terse marduic --rev 0 --rev 1
- abort: cannot use --terse with --rev
- [255]
--- a/tests/test-transplant.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-transplant.t Sat Sep 30 07:52:48 2017 -0700
@@ -884,7 +884,7 @@
$ cat > $TESTTMP/abort.py <<EOF
> # emulate that patch.patch() is aborted at patching on "abort" file
- > from mercurial import extensions, patch as patchmod
+ > from mercurial import error, extensions, patch as patchmod
> def patch(orig, ui, repo, patchname,
> strip=1, prefix='', files=None,
> eolmode='strict', similarity=0):
@@ -894,7 +894,7 @@
> strip=strip, prefix=prefix, files=files,
> eolmode=eolmode, similarity=similarity)
> if 'abort' in files:
- > raise patchmod.PatchError('intentional error while patching')
+ > raise error.PatchError('intentional error while patching')
> return r
> def extsetup(ui):
> extensions.wrapfunction(patchmod, 'patch', patch)
--- a/tests/test-treediscovery.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-treediscovery.t Sat Sep 30 07:52:48 2017 -0700
@@ -520,7 +520,7 @@
"GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
"GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
"GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
- "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
+ "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961+2c8d5d5ec612be65cdfdeac78b7662ab1696324a x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
"GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
"GET /?cmd=capabilities HTTP/1.1" 200 -
"GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
--- a/tests/test-treemanifest.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-treemanifest.t Sat Sep 30 07:52:48 2017 -0700
@@ -862,7 +862,7 @@
$ hg commit -Aqm 'pre-empty commit'
$ hg rm z
$ hg commit --amend -m 'empty commit'
- saved backup bundle to $TESTTMP/grafted-dir-repo-clone/.hg/strip-backup/cb99d5717cea-de37743b-amend.hg (glob)
+ saved backup bundle to $TESTTMP/grafted-dir-repo-clone/.hg/strip-backup/cb99d5717cea-9e3b6b02-amend.hg (glob)
$ hg log -r 'tip + tip^' -T '{manifest}\n'
1:678d3574b88c
1:678d3574b88c
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-uncommit.t Sat Sep 30 07:52:48 2017 -0700
@@ -0,0 +1,384 @@
+Test uncommit - set up the config
+
+ $ cat >> $HGRCPATH <<EOF
+ > [experimental]
+ > evolution=createmarkers, allowunstable
+ > [extensions]
+ > uncommit =
+ > drawdag=$TESTDIR/drawdag.py
+ > EOF
+
+Build up a repo
+
+ $ hg init repo
+ $ cd repo
+ $ hg bookmark foo
+
+Help for uncommit
+
+ $ hg help uncommit
+ hg uncommit [OPTION]... [FILE]...
+
+ uncommit part or all of a local changeset
+
+ This command undoes the effect of a local commit, returning the affected
+ files to their uncommitted state. This means that files modified or
+ deleted in the changeset will be left unchanged, and so will remain
+ modified in the working directory.
+
+ (use 'hg help -e uncommit' to show help for the uncommit extension)
+
+ options ([+] can be repeated):
+
+ --keep allow an empty commit after uncommiting
+ -I --include PATTERN [+] include names matching the given patterns
+ -X --exclude PATTERN [+] exclude names matching the given patterns
+
+ (some details hidden, use --verbose to show complete help)
+
+Uncommit with no commits should fail
+
+ $ hg uncommit
+ abort: cannot uncommit null changeset
+ [255]
+
+Create some commits
+
+ $ touch files
+ $ hg add files
+ $ for i in a ab abc abcd abcde; do echo $i > files; echo $i > file-$i; hg add file-$i; hg commit -m "added file-$i"; done
+ $ ls
+ file-a
+ file-ab
+ file-abc
+ file-abcd
+ file-abcde
+ files
+
+ $ hg log -G -T '{rev}:{node} {desc}' --hidden
+ @ 4:6c4fd43ed714e7fcd8adbaa7b16c953c2e985b60 added file-abcde
+ |
+ o 3:6db330d65db434145c0b59d291853e9a84719b24 added file-abcd
+ |
+ o 2:abf2df566fc193b3ac34d946e63c1583e4d4732b added file-abc
+ |
+ o 1:69a232e754b08d568c4899475faf2eb44b857802 added file-ab
+ |
+ o 0:3004d2d9b50883c1538fc754a3aeb55f1b4084f6 added file-a
+
+Simple uncommit off the top, also moves bookmark
+
+ $ hg bookmark
+ * foo 4:6c4fd43ed714
+ $ hg uncommit
+ $ hg status
+ M files
+ A file-abcde
+ $ hg bookmark
+ * foo 3:6db330d65db4
+
+ $ hg log -G -T '{rev}:{node} {desc}' --hidden
+ x 4:6c4fd43ed714e7fcd8adbaa7b16c953c2e985b60 added file-abcde
+ |
+ @ 3:6db330d65db434145c0b59d291853e9a84719b24 added file-abcd
+ |
+ o 2:abf2df566fc193b3ac34d946e63c1583e4d4732b added file-abc
+ |
+ o 1:69a232e754b08d568c4899475faf2eb44b857802 added file-ab
+ |
+ o 0:3004d2d9b50883c1538fc754a3aeb55f1b4084f6 added file-a
+
+
+Recommit
+
+ $ hg commit -m 'new change abcde'
+ $ hg status
+ $ hg heads -T '{rev}:{node} {desc}'
+ 5:0c07a3ccda771b25f1cb1edbd02e683723344ef1 new change abcde (no-eol)
+
+Uncommit of non-existent and unchanged files has no effect
+ $ hg uncommit nothinghere
+ nothing to uncommit
+ [1]
+ $ hg status
+ $ hg uncommit file-abc
+ nothing to uncommit
+ [1]
+ $ hg status
+
+Try partial uncommit, also moves bookmark
+
+ $ hg bookmark
+ * foo 5:0c07a3ccda77
+ $ hg uncommit files
+ $ hg status
+ M files
+ $ hg bookmark
+ * foo 6:3727deee06f7
+ $ hg heads -T '{rev}:{node} {desc}'
+ 6:3727deee06f72f5ffa8db792ee299cf39e3e190b new change abcde (no-eol)
+ $ hg log -r . -p -T '{rev}:{node} {desc}'
+ 6:3727deee06f72f5ffa8db792ee299cf39e3e190b new change abcdediff -r 6db330d65db4 -r 3727deee06f7 file-abcde
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/file-abcde Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,1 @@
+ +abcde
+
+ $ hg log -G -T '{rev}:{node} {desc}' --hidden
+ @ 6:3727deee06f72f5ffa8db792ee299cf39e3e190b new change abcde
+ |
+ | x 5:0c07a3ccda771b25f1cb1edbd02e683723344ef1 new change abcde
+ |/
+ | x 4:6c4fd43ed714e7fcd8adbaa7b16c953c2e985b60 added file-abcde
+ |/
+ o 3:6db330d65db434145c0b59d291853e9a84719b24 added file-abcd
+ |
+ o 2:abf2df566fc193b3ac34d946e63c1583e4d4732b added file-abc
+ |
+ o 1:69a232e754b08d568c4899475faf2eb44b857802 added file-ab
+ |
+ o 0:3004d2d9b50883c1538fc754a3aeb55f1b4084f6 added file-a
+
+ $ hg commit -m 'update files for abcde'
+
+Uncommit with dirty state
+
+ $ echo "foo" >> files
+ $ cat files
+ abcde
+ foo
+ $ hg status
+ M files
+ $ hg uncommit
+ abort: uncommitted changes
+ [255]
+ $ hg uncommit files
+ $ cat files
+ abcde
+ foo
+ $ hg commit -m "files abcde + foo"
+
+Testing the 'experimental.uncommitondirtywdir' config
+
+ $ echo "bar" >> files
+ $ hg uncommit
+ abort: uncommitted changes
+ [255]
+ $ hg uncommit --config experimental.uncommitondirtywdir=True
+ $ hg commit -m "files abcde + foo"
+
+Uncommit in the middle of a stack, does not move bookmark
+
+ $ hg checkout '.^^^'
+ 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ (leaving bookmark foo)
+ $ hg log -r . -p -T '{rev}:{node} {desc}'
+ 2:abf2df566fc193b3ac34d946e63c1583e4d4732b added file-abcdiff -r 69a232e754b0 -r abf2df566fc1 file-abc
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/file-abc Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,1 @@
+ +abc
+ diff -r 69a232e754b0 -r abf2df566fc1 files
+ --- a/files Thu Jan 01 00:00:00 1970 +0000
+ +++ b/files Thu Jan 01 00:00:00 1970 +0000
+ @@ -1,1 +1,1 @@
+ -ab
+ +abc
+
+ $ hg bookmark
+ foo 9:48e5bd7cd583
+ $ hg uncommit
+ $ hg status
+ M files
+ A file-abc
+ $ hg heads -T '{rev}:{node} {desc}'
+ 9:48e5bd7cd583eb24164ef8b89185819c84c96ed7 files abcde + foo (no-eol)
+ $ hg bookmark
+ foo 9:48e5bd7cd583
+ $ hg commit -m 'new abc'
+ created new head
+
+Partial uncommit in the middle, does not move bookmark
+
+ $ hg checkout '.^'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg log -r . -p -T '{rev}:{node} {desc}'
+ 1:69a232e754b08d568c4899475faf2eb44b857802 added file-abdiff -r 3004d2d9b508 -r 69a232e754b0 file-ab
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/file-ab Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,1 @@
+ +ab
+ diff -r 3004d2d9b508 -r 69a232e754b0 files
+ --- a/files Thu Jan 01 00:00:00 1970 +0000
+ +++ b/files Thu Jan 01 00:00:00 1970 +0000
+ @@ -1,1 +1,1 @@
+ -a
+ +ab
+
+ $ hg bookmark
+ foo 9:48e5bd7cd583
+ $ hg uncommit file-ab
+ $ hg status
+ A file-ab
+
+ $ hg heads -T '{rev}:{node} {desc}\n'
+ 11:8eb87968f2edb7f27f27fe676316e179de65fff6 added file-ab
+ 10:5dc89ca4486f8a88716c5797fa9f498d13d7c2e1 new abc
+ 9:48e5bd7cd583eb24164ef8b89185819c84c96ed7 files abcde + foo
+
+ $ hg bookmark
+ foo 9:48e5bd7cd583
+ $ hg commit -m 'update ab'
+ $ hg status
+ $ hg heads -T '{rev}:{node} {desc}\n'
+ 12:f21039c59242b085491bb58f591afc4ed1c04c09 update ab
+ 10:5dc89ca4486f8a88716c5797fa9f498d13d7c2e1 new abc
+ 9:48e5bd7cd583eb24164ef8b89185819c84c96ed7 files abcde + foo
+
+ $ hg log -G -T '{rev}:{node} {desc}' --hidden
+ @ 12:f21039c59242b085491bb58f591afc4ed1c04c09 update ab
+ |
+ o 11:8eb87968f2edb7f27f27fe676316e179de65fff6 added file-ab
+ |
+ | o 10:5dc89ca4486f8a88716c5797fa9f498d13d7c2e1 new abc
+ | |
+ | | o 9:48e5bd7cd583eb24164ef8b89185819c84c96ed7 files abcde + foo
+ | | |
+ | | | x 8:83815831694b1271e9f207cb1b79b2b19275edcb files abcde + foo
+ | | |/
+ | | | x 7:0977fa602c2fd7d8427ed4e7ee15ea13b84c9173 update files for abcde
+ | | |/
+ | | o 6:3727deee06f72f5ffa8db792ee299cf39e3e190b new change abcde
+ | | |
+ | | | x 5:0c07a3ccda771b25f1cb1edbd02e683723344ef1 new change abcde
+ | | |/
+ | | | x 4:6c4fd43ed714e7fcd8adbaa7b16c953c2e985b60 added file-abcde
+ | | |/
+ | | o 3:6db330d65db434145c0b59d291853e9a84719b24 added file-abcd
+ | | |
+ | | x 2:abf2df566fc193b3ac34d946e63c1583e4d4732b added file-abc
+ | |/
+ | x 1:69a232e754b08d568c4899475faf2eb44b857802 added file-ab
+ |/
+ o 0:3004d2d9b50883c1538fc754a3aeb55f1b4084f6 added file-a
+
+Uncommit with draft parent
+
+ $ hg uncommit
+ $ hg phase -r .
+ 11: draft
+ $ hg commit -m 'update ab again'
+
+Uncommit with public parent
+
+ $ hg phase -p "::.^"
+ $ hg uncommit
+ $ hg phase -r .
+ 11: public
+
+Partial uncommit with public parent
+
+ $ echo xyz > xyz
+ $ hg add xyz
+ $ hg commit -m "update ab and add xyz"
+ $ hg uncommit xyz
+ $ hg status
+ A xyz
+ $ hg phase -r .
+ 15: draft
+ $ hg phase -r ".^"
+ 11: public
+
+Uncommit leaving an empty changeset
+
+ $ cd $TESTTMP
+ $ hg init repo1
+ $ cd repo1
+ $ hg debugdrawdag <<'EOS'
+ > Q
+ > |
+ > P
+ > EOS
+ $ hg up Q -q
+ $ hg uncommit --keep
+ $ hg log -G -T '{desc} FILES: {files}'
+ @ Q FILES:
+ |
+ | x Q FILES: Q
+ |/
+ o P FILES: P
+
+ $ hg status
+ A Q
+
+ $ cd ..
+ $ rm -rf repo1
+
+Testing uncommit while merging
+
+ $ hg init repo2
+ $ cd repo2
+
+Create some history
+
+ $ touch a
+ $ hg add a
+ $ for i in 1 2 3; do echo $i > a; hg commit -m "a $i"; done
+ $ hg checkout 0
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ touch b
+ $ hg add b
+ $ for i in 1 2 3; do echo $i > b; hg commit -m "b $i"; done
+ created new head
+ $ hg log -G -T '{rev}:{node} {desc}' --hidden
+ @ 5:2cd56cdde163ded2fbb16ba2f918c96046ab0bf2 b 3
+ |
+ o 4:c3a0d5bb3b15834ffd2ef9ef603e93ec65cf2037 b 2
+ |
+ o 3:49bb009ca26078726b8870f1edb29fae8f7618f5 b 1
+ |
+ | o 2:990982b7384266e691f1bc08ca36177adcd1c8a9 a 3
+ | |
+ | o 1:24d38e3cf160c7b6f5ffe82179332229886a6d34 a 2
+ |/
+ o 0:ea4e33293d4d274a2ba73150733c2612231f398c a 1
+
+
+Merge, then expect uncommit to fail on both the merge in the working directory and the merge changeset
+
+ $ hg merge 2
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+
+ $ hg uncommit
+ abort: outstanding uncommitted merge
+ [255]
+
+ $ hg uncommit --config experimental.uncommitondirtywdir=True
+ abort: cannot uncommit while merging
+ [255]
+
+ $ hg status
+ M a
+ $ hg commit -m 'merge a and b'
+
+ $ hg uncommit
+ abort: cannot uncommit merge changeset
+ [255]
+
+ $ hg status
+ $ hg log -G -T '{rev}:{node} {desc}' --hidden
+ @ 6:c03b9c37bc67bf504d4912061cfb527b47a63c6e merge a and b
+ |\
+ | o 5:2cd56cdde163ded2fbb16ba2f918c96046ab0bf2 b 3
+ | |
+ | o 4:c3a0d5bb3b15834ffd2ef9ef603e93ec65cf2037 b 2
+ | |
+ | o 3:49bb009ca26078726b8870f1edb29fae8f7618f5 b 1
+ | |
+ o | 2:990982b7384266e691f1bc08ca36177adcd1c8a9 a 3
+ | |
+ o | 1:24d38e3cf160c7b6f5ffe82179332229886a6d34 a 2
+ |/
+ o 0:ea4e33293d4d274a2ba73150733c2612231f398c a 1
+
--- a/tests/test-update-branches.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-update-branches.t Sat Sep 30 07:52:48 2017 -0700
@@ -473,7 +473,7 @@
> [ui]
> logtemplate={rev}:{node|short} {desc|firstline}
> [experimental]
- > evolution=createmarkers
+ > stabilization=createmarkers
> EOF
Test no-argument update to a successor of an obsoleted changeset
--- a/tests/test-update-dest.t Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-update-dest.t Sat Sep 30 07:52:48 2017 -0700
@@ -33,3 +33,16 @@
abort: update destination required by configuration
(use hg pull followed by hg update DEST)
[255]
+
+ $ cd ..
+
+update.requiredest should silence the "hg update" hint after pull
+ $ hg init repo1
+ $ cd repo1
+ $ hg pull ../repo
+ pulling from ../repo
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 2 changes to 1 files
--- a/tests/test-wireproto.py Mon Sep 18 10:54:00 2017 -0700
+++ b/tests/test-wireproto.py Sat Sep 30 07:52:48 2017 -0700
@@ -19,7 +19,26 @@
def __init__(self, serverrepo):
self.serverrepo = serverrepo
- def _capabilities(self):
+ @property
+ def ui(self):
+ return self.serverrepo.ui
+
+ def url(self):
+ return 'test'
+
+ def local(self):
+ return None
+
+ def peer(self):
+ return self
+
+ def canpush(self):
+ return True
+
+ def close(self):
+ pass
+
+ def capabilities(self):
return ['batch']
def _call(self, cmd, **args):
@@ -55,7 +74,7 @@
clt = clientpeer(srv)
print(clt.greet("Foobar"))
-b = clt.batch()
-fs = [b.greet(s) for s in ["Fo, =;:<o", "Bar"]]
+b = clt.iterbatch()
+map(b.greet, ('Fo, =;:<o', 'Bar'))
b.submit()
-print([f.value for f in fs])
+print([r for r in b.results()])